flow3rdown committed on
Commit 21269d7
1 Parent(s): 16bab0d

Add inference module

app.py CHANGED
@@ -1,7 +1,112 @@
 import gradio as gr

 def single_inference_iit(head_img, head_id, tail_img, tail_id, question_txt, question_id):
-    return question_txt

 def single_inference_tti(head_txt, head_id, tail_txt, tail_id, question_img, question_id):
     return head_txt
@@ -53,20 +158,20 @@ def single_tab_tti():
        tail_text = gr.Textbox(lines=1, label="Tail Name")
        tail_ent = gr.Textbox(lines=1, label="Tail Entity")
    with gr.Column():
-       question_text = gr.Image(type='pil', label="Question Image")
        question_ent = gr.Textbox(lines=1, label="Question Entity")
    submit_btn = gr.Button("Submit")
    output_text = gr.Textbox(label="Output")

    submit_btn.click(fn=single_inference_iit,
-                    inputs=[head_text, head_ent, tail_text, tail_ent, question_text, question_ent],
                     outputs=[output_text])

    examples=[['qinghai_lake', 'Q201294', 'inland_lake', 'Q31805992', 'examples/qinghai_lake.png', 'Q18812548']]
    ex = gr.Examples(
        examples=examples,
        fn=single_inference_iit,
-       inputs=[head_text, head_ent, tail_text, tail_ent, question_text, question_ent],
        outputs=[output_text],
        cache_examples=True,
        run_on_click=True
 
 import gradio as gr
+ import torch
+ from torch import nn
+ from huggingface_hub import hf_hub_download
+ from transformers import BertModel, BertTokenizer, CLIPModel, BertConfig, CLIPConfig, CLIPProcessor
+ from modeling_unimo import UnimoForMaskedLM
+
+ def load_dict_text(path):
+     with open(path, 'r') as f:
+         load_data = {}
+         lines = f.readlines()
+         for line in lines:
+             key, value = line.split('\t')
+             load_data[key] = value.replace('\n', '')
+     return load_data
+
+ def load_text(path):
+     with open(path, 'r') as f:
+         lines = f.readlines()
+         load_data = []
+         for line in lines:
+             load_data.append(line.strip().replace('\n', ''))
+     return load_data
+
+ class MKGformerModel(nn.Module):
+     def __init__(self, vision_config, text_config):
+         super().__init__()
+         self.model = UnimoForMaskedLM(vision_config, text_config)
+
+     def forward(self, batch):
+         return self.model(**batch, return_dict=True)
+
+ # tokenizer
+ tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
+
+ # entity and relation
+ ent2text = load_dict_text('./dataset/MarKG/entity2text.txt')
+ rel2text = load_dict_text('./dataset/MarKG/relation2text.txt')
+ analogy_entities = load_text('./dataset/MARS/analogy_entities.txt')
+ analogy_relations = load_text('./dataset/MARS/analogy_relations.txt')
+ ent2description = load_dict_text('./dataset/MarKG/entity2textlong.txt')
+
+ text2ent = {text: ent for ent, text in ent2text.items()}
+ ent2token = {ent: f"[ENTITY_{i}]" for i, ent in enumerate(ent2description)}
+ rel2token = {rel: f"[RELATION_{i}]" for i, rel in enumerate(rel2text)}
+ analogy_ent2token = {ent: f"[ENTITY_{i}]" for i, ent in enumerate(ent2description) if ent in analogy_entities}
+ analogy_rel2token = {rel: f"[RELATION_{i}]" for i, rel in enumerate(rel2text) if rel in analogy_relations}
+ entity_list = list(ent2token.values())
+ relation_list = list(rel2token.values())
+ analogy_ent_list = list(analogy_ent2token.values())
+ analogy_rel_list = list(analogy_rel2token.values())
+
+ num_added_tokens = tokenizer.add_special_tokens({'additional_special_tokens': entity_list})
+ num_added_tokens = tokenizer.add_special_tokens({'additional_special_tokens': relation_list})
+
+ vocab = tokenizer.get_added_vocab()  # dict: token -> id
+ relation_id_st = vocab[relation_list[0]]
+ relation_id_ed = vocab[relation_list[-1]] + 1
+ entity_id_st = vocab[entity_list[0]]
+ entity_id_ed = vocab[entity_list[-1]] + 1
+
+ # analogy entities and relations
+ analogy_entity_ids = [vocab[ent] for ent in analogy_ent_list]
+ analogy_relation_ids = [vocab[rel] for rel in analogy_rel_list]
+ num_added_tokens = tokenizer.add_special_tokens({'additional_special_tokens': ["[R]"]})
+
+ # model
+ checkpoint_path = hf_hub_download(repo_id='flow3rdown/mkgformer_mart_ft', filename="mkgformer_mart_ft", repo_type='model')
+ clip_config = CLIPConfig.from_pretrained('openai/clip-vit-base-patch32').vision_config
+ clip_config.device = 'cpu'
+ bert_config = BertConfig.from_pretrained('bert-base-uncased')
+ mkgformer = MKGformerModel(clip_config, bert_config)
+ mkgformer.model.resize_token_embeddings(len(tokenizer))
+
+ mkgformer.load_state_dict(torch.load(checkpoint_path, map_location='cpu')["state_dict"])
+
+ # processor
+ processor = CLIPProcessor.from_pretrained('openai/clip-vit-base-patch32')
+

 def single_inference_iit(head_img, head_id, tail_img, tail_id, question_txt, question_id):
+     # (I, I) -> (T, ?)
+     head_ent_text, tail_ent_text = ent2description[head_id], ent2description[tail_id]
+
+     inputs = tokenizer(
+         tokenizer.sep_token.join([analogy_ent2token[head_id] + " " + head_ent_text, "[R] ", analogy_ent2token[tail_id] + " " + tail_ent_text]),
+         tokenizer.sep_token.join([analogy_ent2token[question_id] + " ", "[R] ", "[MASK]"]),
+         truncation="longest_first", max_length=128, padding="longest", return_tensors='pt', add_special_tokens=True)
+     sep_idx = [[i for i, ids in enumerate(input_ids) if ids == tokenizer.sep_token_id] for input_ids in inputs['input_ids']]
+     inputs['sep_idx'] = torch.tensor(sep_idx)
+     inputs['attention_mask'] = inputs['attention_mask'].unsqueeze(1).expand([inputs['input_ids'].size(0), inputs['input_ids'].size(1), inputs['input_ids'].size(1)]).clone()
+     for i, idx in enumerate(sep_idx):
+         inputs['attention_mask'][i, :idx[2], idx[2]:] = 0
+
+     # image
+     pixel_values = processor(images=[head_img, tail_img], return_tensors='pt')['pixel_values'].squeeze()
+     inputs['pixel_values'] = pixel_values.unsqueeze(0)
+
+     input_ids = inputs['input_ids']
+
+     model_output = mkgformer.model(**inputs, return_dict=True)
+     logits = model_output[0].logits
+     bsz = input_ids.shape[0]
+
+     _, mask_idx = (input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)  # bsz
+     mask_logits = logits[torch.arange(bsz), mask_idx][:, analogy_entity_ids]  # bsz, 1, entity
+     answer = ent2text[list(analogy_ent2token.keys())[mask_logits.argmax().item()]]
+
+     return answer

 def single_inference_tti(head_txt, head_id, tail_txt, tail_id, question_img, question_id):
     return head_txt
 
        tail_text = gr.Textbox(lines=1, label="Tail Name")
        tail_ent = gr.Textbox(lines=1, label="Tail Entity")
    with gr.Column():
+       question_image = gr.Image(type='pil', label="Question Image")
        question_ent = gr.Textbox(lines=1, label="Question Entity")
    submit_btn = gr.Button("Submit")
    output_text = gr.Textbox(label="Output")

    submit_btn.click(fn=single_inference_iit,
+                    inputs=[head_text, head_ent, tail_text, tail_ent, question_image, question_ent],
                     outputs=[output_text])

    examples=[['qinghai_lake', 'Q201294', 'inland_lake', 'Q31805992', 'examples/qinghai_lake.png', 'Q18812548']]
    ex = gr.Examples(
        examples=examples,
        fn=single_inference_iit,
+       inputs=[head_text, head_ent, tail_text, tail_ent, question_image, question_ent],
        outputs=[output_text],
        cache_examples=True,
        run_on_click=True
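
For reference, the bundled example row (qinghai_lake / Q201294, inland_lake / Q31805992, question entity Q18812548) can also be run against the new inference function outside the Gradio UI. A minimal sketch, assuming the globals built above in app.py are already loaded; the tail image path here is a stand-in, and the question_txt slot is passed as None because the function body never reads it:

from PIL import Image

# Minimal driver for the new (image, image) -> (text, ?) path, using the example assets.
head_img = Image.open("examples/qinghai_lake.png")   # image of the head entity (Q201294)
tail_img = Image.open("examples/qinghai_lake.png")   # stand-in image for the tail entity (Q31805992)

answer = single_inference_iit(
    head_img, "Q201294",      # head: qinghai lake
    tail_img, "Q31805992",    # tail: inland lake
    None, "Q18812548",        # question entity; the text argument is unused
)
print(answer)

Because mask_logits is restricted to analogy_entity_ids, the printed answer is always the name of one of the entities listed in dataset/MARS/analogy_entities.txt below.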
dataset/MARS/analogy_entities.txt ADDED
@@ -0,0 +1,2063 @@
1
+ Q838811
2
+ Q184840
3
+ Q998
4
+ Q165447
5
+ Q495304
6
+ Q2935
7
+ Q103129
8
+ Q217602
9
+ Q8434
10
+ Q14373
11
+ Q100293148
12
+ Q169031
13
+ Q220869
14
+ Q2329
15
+ Q6004788
16
+ Q974
17
+ Q283233
18
+ Q838312
19
+ Q4690798
20
+ Q1566593
21
+ Q125191
22
+ Q21552830
23
+ Q42848
24
+ Q12819564
25
+ Q10786776
26
+ Q3372957
27
+ Q9158768
28
+ Q235155
29
+ Q327245
30
+ Q44705078
31
+ Q753
32
+ Q1196408
33
+ Q3051005
34
+ Q56139
35
+ Q233861
36
+ Q339353
37
+ Q8799
38
+ Q20129
39
+ Q7873
40
+ Q42764222
41
+ Q11410
42
+ Q188055
43
+ Q109564569
44
+ Q25497
45
+ Q30461
46
+ Q2290980
47
+ Q5242962
48
+ Q3732574
49
+ Q6497624
50
+ Q180684
51
+ Q7242
52
+ Q1650915
53
+ Q44432
54
+ Q2665615
55
+ Q3325266
56
+ Q170790
57
+ Q2381698
58
+ Q488383
59
+ Q43177
60
+ Q1760818
61
+ Q81171102
62
+ Q11451
63
+ Q16572
64
+ Q6620231
65
+ Q1292119
66
+ Q188869
67
+ Q25956
68
+ Q1339255
69
+ Q15328
70
+ Q40357
71
+ Q179904
72
+ Q1196314
73
+ Q1250916
74
+ Q7235103
75
+ Q20723554
76
+ Q102397207
77
+ Q36496
78
+ Q1416279
79
+ Q12348865
80
+ Q8047
81
+ Q574433
82
+ Q865588
83
+ Q602854
84
+ Q221836
85
+ Q1466064
86
+ Q244158
87
+ Q193432
88
+ Q1435365
89
+ Q234129
90
+ Q14212
91
+ Q1544262
92
+ Q21208
93
+ Q7111875
94
+ Q11034
95
+ Q2142250
96
+ Q41796
97
+ Q13189320
98
+ Q6555422
99
+ Q7889
100
+ Q15729017
101
+ Q7978035
102
+ Q173950
103
+ Q1396399
104
+ Q2751223
105
+ Q1050405
106
+ Q7366
107
+ Q16533
108
+ Q331481
109
+ Q629
110
+ Q97502608
111
+ Q49850
112
+ Q45190
113
+ Q5046723
114
+ Q599151
115
+ Q93184
116
+ Q6072584
117
+ Q877998
118
+ Q627
119
+ Q8386
120
+ Q24885626
121
+ Q2811064
122
+ Q209
123
+ Q9368
124
+ Q191768
125
+ Q123509
126
+ Q988108
127
+ Q200253
128
+ Q21818619
129
+ Q1243001
130
+ Q42302
131
+ Q270380
132
+ Q506
133
+ Q337907
134
+ Q23595
135
+ Q45085932
136
+ Q109246805
137
+ Q81944
138
+ Q869095
139
+ Q214649
140
+ Q208021
141
+ Q573952
142
+ Q7380
143
+ Q3962
144
+ Q618710
145
+ Q33669098
146
+ Q23444
147
+ Q2118942
148
+ Q40260
149
+ Q132911
150
+ Q14328596
151
+ Q6607
152
+ Q200539
153
+ Q36963
154
+ Q60415975
155
+ Q20738981
156
+ Q101097118
157
+ Q82794
158
+ Q111653591
159
+ Q15805316
160
+ Q5151
161
+ Q1599148
162
+ Q370185
163
+ Q7617440
164
+ Q7748
165
+ Q20490867
166
+ Q51
167
+ Q18162636
168
+ Q40435
169
+ Q14388
170
+ Q112597033
171
+ Q22656
172
+ Q19758137
173
+ Q55604566
174
+ Q16887036
175
+ Q186150
176
+ Q3050175
177
+ Q646
178
+ Q27496
179
+ Q3406654
180
+ Q185583
181
+ Q7850
182
+ Q168002
183
+ Q7860
184
+ Q23009
185
+ Q65
186
+ Q44782
187
+ Q16003532
188
+ Q23540
189
+ Q15551438
190
+ Q756
191
+ Q1720648
192
+ Q947686
193
+ Q856887
194
+ Q110155210
195
+ Q3114762
196
+ Q8087
197
+ Q1475429
198
+ Q18754
199
+ Q80157
200
+ Q26377667
201
+ Q2526135
202
+ Q80024808
203
+ Q57216
204
+ Q25372
205
+ Q1460233
206
+ Q40080
207
+ Q12334336
208
+ Q2074836
209
+ Q25101745
210
+ Q718570
211
+ Q182153
212
+ Q11620540
213
+ Q282
214
+ Q42948
215
+ Q221673
216
+ Q34095
217
+ Q4681350
218
+ Q103827699
219
+ Q7561
220
+ Q98089548
221
+ Q2236563
222
+ Q109121530
223
+ Q212198
224
+ Q96622169
225
+ Q236371
226
+ Q38127868
227
+ Q61285
228
+ Q5119
229
+ Q165233
230
+ Q38645
231
+ Q4795846
232
+ Q39073224
233
+ Q13119823
234
+ Q81895
235
+ Q783794
236
+ Q193666
237
+ Q38695
238
+ Q14332
239
+ Q129026
240
+ Q831735
241
+ Q104439289
242
+ Q107008077
243
+ Q846664
244
+ Q1263003
245
+ Q29485
246
+ Q14674
247
+ Q180846
248
+ Q986291
249
+ Q9266
250
+ Q211521
251
+ Q1127296
252
+ Q40458125
253
+ Q60315954
254
+ Q61503220
255
+ Q37517
256
+ Q105764136
257
+ Q185789
258
+ Q2125243
259
+ Q16523690
260
+ Q1001059
261
+ Q6279182
262
+ Q133500
263
+ Q679300
264
+ Q296955
265
+ Q901785
266
+ Q76115
267
+ Q80973
268
+ Q7547763
269
+ Q107383376
270
+ Q103838588
271
+ Q1630633
272
+ Q160232
273
+ Q633625
274
+ Q191851
275
+ Q16875619
276
+ Q18185
277
+ Q65510617
278
+ Q919348
279
+ Q36649
280
+ Q331710
281
+ Q7391292
282
+ Q2348
283
+ Q6303561
284
+ Q165714
285
+ Q986177
286
+ Q45867
287
+ Q27686
288
+ Q1470757
289
+ Q171187
290
+ Q18001597
291
+ Q614467
292
+ Q1620031
293
+ Q5893251
294
+ Q34396
295
+ Q245005
296
+ Q16874635
297
+ Q7369
298
+ Q170658
299
+ Q37156731
300
+ Q183129
301
+ Q11631
302
+ Q14400
303
+ Q107364261
304
+ Q25288
305
+ Q286
306
+ Q5283
307
+ Q18812548
308
+ Q21655367
309
+ Q31808687
310
+ Q17562059
311
+ Q323840
312
+ Q63677013
313
+ Q132744
314
+ Q131263
315
+ Q43229
316
+ Q122508
317
+ Q1156854
318
+ Q1573122
319
+ Q91165
320
+ Q22657
321
+ Q73371931
322
+ Q111084907
323
+ Q178794
324
+ Q104882831
325
+ Q39911916
326
+ Q581459
327
+ Q7378
328
+ Q11707
329
+ Q747810
330
+ Q853058
331
+ Q134808
332
+ Q37654
333
+ Q276099
334
+ Q20862847
335
+ Q4668171
336
+ Q21955018
337
+ Q305418
338
+ Q83363
339
+ Q24897655
340
+ Q152
341
+ Q56274905
342
+ Q1137365
343
+ Q876776
344
+ Q1824165
345
+ Q179723
346
+ Q22676
347
+ Q112578360
348
+ Q44
349
+ Q203415
350
+ Q1907525
351
+ Q683906
352
+ Q211086
353
+ Q30
354
+ Q1458430
355
+ Q16938807
356
+ Q2986261
357
+ Q4259259
358
+ Q3308178
359
+ Q9121
360
+ Q17054224
361
+ Q184754
362
+ Q8514
363
+ Q1153471
364
+ Q2743689
365
+ Q687900
366
+ Q23883
367
+ Q178561
368
+ Q1064858
369
+ Q17319881
370
+ Q7406919
371
+ Q6663
372
+ Q496696
373
+ Q22667
374
+ Q23538
375
+ Q621114
376
+ Q38166
377
+ Q1469340
378
+ Q851782
379
+ Q148
380
+ Q181600
381
+ Q54803638
382
+ Q46999639
383
+ Q551997
384
+ Q64634437
385
+ Q170412
386
+ Q25653
387
+ Q28298
388
+ Q41217
389
+ Q25107
390
+ Q189393
391
+ Q6813432
392
+ Q6266
393
+ Q207832
394
+ Q11006
395
+ Q499916
396
+ Q383973
397
+ Q3217573
398
+ Q83093
399
+ Q20706561
400
+ Q41176
401
+ Q11413
402
+ Q40128349
403
+ Q168247
404
+ Q42329
405
+ Q1758037
406
+ Q3071551
407
+ Q580939
408
+ Q1057
409
+ Q9128
410
+ Q8074
411
+ Q63013258
412
+ Q178898
413
+ Q44235
414
+ Q1770
415
+ Q65940913
416
+ Q11474
417
+ Q127956
418
+ Q107246611
419
+ Q56056305
420
+ Q4284971
421
+ Q162
422
+ Q132821
423
+ Q7414
424
+ Q9430
425
+ Q37105
426
+ Q7569
427
+ Q2565924
428
+ Q9257166
429
+ Q206615
430
+ Q38
431
+ Q9482
432
+ Q345367
433
+ Q4233325
434
+ Q106476041
435
+ Q165947
436
+ Q1758639
437
+ Q1004
438
+ Q42295
439
+ Q4197743
440
+ Q107342
441
+ Q523
442
+ Q1595418
443
+ Q15729598
444
+ Q788926
445
+ Q7747542
446
+ Q13159882
447
+ Q60
448
+ Q55876931
449
+ Q773108
450
+ Q3306164
451
+ Q193605
452
+ Q73545549
453
+ Q834191
454
+ Q335234
455
+ Q34956
456
+ Q12280
457
+ Q83437
458
+ Q814207
459
+ Q4116742
460
+ Q11422
461
+ Q105507
462
+ Q11042
463
+ Q82604
464
+ Q213142
465
+ Q1269
466
+ Q194112
467
+ Q20737431
468
+ Q96093619
469
+ Q1484064
470
+ Q363931
471
+ Q134052
472
+ Q1277575
473
+ Q12370
474
+ Q109581753
475
+ Q105731
476
+ Q2215554
477
+ Q107998873
478
+ Q208160
479
+ Q111694088
480
+ Q610190
481
+ Q849964
482
+ Q604748
483
+ Q58093
484
+ Q3140971
485
+ Q375102
486
+ Q643352
487
+ Q336751
488
+ Q131656
489
+ Q15332375
490
+ Q37241976
491
+ Q160342
492
+ Q488798
493
+ Q17200001
494
+ Q37129
495
+ Q11432
496
+ Q177634
497
+ Q196
498
+ Q118093
499
+ Q189737
500
+ Q33971
501
+ Q8486
502
+ Q4503831
503
+ Q34876
504
+ Q814232
505
+ Q1241356
506
+ Q185939
507
+ Q34577
508
+ Q1223829
509
+ Q58157328
510
+ Q9005258
511
+ Q4781618
512
+ Q12876
513
+ Q112805200
514
+ Q2514663
515
+ Q1308978
516
+ Q192795
517
+ Q1867507
518
+ Q1088
519
+ Q3649129
520
+ Q18573407
521
+ Q11898832
522
+ Q63769412
523
+ Q45912917
524
+ Q182849
525
+ Q330190
526
+ Q120997
527
+ Q40015
528
+ Q18113858
529
+ Q234460
530
+ Q106619054
531
+ Q81938
532
+ Q9690
533
+ Q7367
534
+ Q26256512
535
+ Q26359826
536
+ Q1334805
537
+ Q133235
538
+ Q3380760
539
+ Q641118
540
+ Q24245823
541
+ Q54453625
542
+ Q48816342
543
+ Q2001588
544
+ Q1928899
545
+ Q489157
546
+ Q168639
547
+ Q62562253
548
+ Q1248784
549
+ Q778384
550
+ Q605035
551
+ Q1210093
552
+ Q151885
553
+ Q15838080
554
+ Q104813442
555
+ Q46335
556
+ Q81707
557
+ Q107383247
558
+ Q79007
559
+ Q1068107
560
+ Q2827871
561
+ Q1755355
562
+ Q210112
563
+ Q355304
564
+ Q2364697
565
+ Q11639276
566
+ Q4409456
567
+ Q47508768
568
+ Q10858674
569
+ Q190478
570
+ Q201959
571
+ Q834007
572
+ Q131172
573
+ Q496380
574
+ Q4989906
575
+ Q4262
576
+ Q177923
577
+ Q11471
578
+ Q48282
579
+ Q36864
580
+ Q5185279
581
+ Q79935565
582
+ Q29838581
583
+ Q51377271
584
+ Q1148747
585
+ Q40858
586
+ Q21190816
587
+ Q50000
588
+ Q109507868
589
+ Q73006538
590
+ Q1521410
591
+ Q821952
592
+ Q47462850
593
+ Q173056
594
+ Q3010205
595
+ Q1247867
596
+ Q182155
597
+ Q781
598
+ Q256458
599
+ Q190139
600
+ Q61071643
601
+ Q508291
602
+ Q1073656
603
+ Q6460735
604
+ Q38933
605
+ Q39816
606
+ Q7377
607
+ Q6028924
608
+ Q131512
609
+ Q2424752
610
+ Q1498298
611
+ Q46311
612
+ Q160695
613
+ Q2845
614
+ Q81240
615
+ Q386905
616
+ Q2047589
617
+ Q3196
618
+ Q18335
619
+ Q2727213
620
+ Q47263
621
+ Q25894
622
+ Q3241972
623
+ Q1997
624
+ Q326277
625
+ Q11994045
626
+ Q37555509
627
+ Q14403
628
+ Q2995529
629
+ Q105428070
630
+ Q131696
631
+ Q74424273
632
+ Q37540140
633
+ Q1799866
634
+ Q200538
635
+ Q2645227
636
+ Q255722
637
+ Q2570643
638
+ Q18031357
639
+ Q127933
640
+ Q4290
641
+ Q171558
642
+ Q1866689
643
+ Q30100868
644
+ Q37828
645
+ Q1780863
646
+ Q12970360
647
+ Q1183543
648
+ Q46831
649
+ Q198
650
+ Q1105534
651
+ Q21171262
652
+ Q386854
653
+ Q18237485
654
+ Q1368
655
+ Q4173974
656
+ Q8104
657
+ Q182458
658
+ Q1056396
659
+ Q1426795
660
+ Q2063640
661
+ Q380933
662
+ Q2486664
663
+ Q1937716
664
+ Q23834
665
+ Q84
666
+ Q36641511
667
+ Q11032
668
+ Q1947892
669
+ Q290178
670
+ Q101761
671
+ Q4022
672
+ Q7140693
673
+ Q446
674
+ Q3216816
675
+ Q1066536
676
+ Q38926
677
+ Q4421
678
+ Q5432619
679
+ Q7562091
680
+ Q1006733
681
+ Q2011889
682
+ Q1914636
683
+ Q2022532
684
+ Q1379116
685
+ Q831691
686
+ Q4604
687
+ Q43059
688
+ Q20755687
689
+ Q4917
690
+ Q32738
691
+ Q106106
692
+ Q173799
693
+ Q126017
694
+ Q77073841
695
+ Q317088
696
+ Q5
697
+ Q11587
698
+ Q28803
699
+ Q121769
700
+ Q14745
701
+ Q188828
702
+ Q160120
703
+ Q2748405
704
+ Q1746015
705
+ Q1305037
706
+ Q1401371
707
+ Q160070
708
+ Q492768
709
+ Q1073
710
+ Q7743
711
+ Q1310239
712
+ Q3535686
713
+ Q83790
714
+ Q208364
715
+ Q840396
716
+ Q59002477
717
+ Q11469
718
+ Q350176
719
+ Q43091
720
+ Q16917
721
+ Q57052
722
+ Q182832
723
+ Q22889
724
+ Q107262066
725
+ Q76768
726
+ Q170544
727
+ Q1424833
728
+ Q133279
729
+ Q212989
730
+ Q33260112
731
+ Q86642263
732
+ Q185245
733
+ Q190771
734
+ Q4229435
735
+ Q843589
736
+ Q1344
737
+ Q199657
738
+ Q667982
739
+ Q643546
740
+ Q50690
741
+ Q5413
742
+ Q211198
743
+ Q11426
744
+ Q12271
745
+ Q35808
746
+ Q7188
747
+ Q181394
748
+ Q3950
749
+ Q37319371
750
+ Q13189
751
+ Q121359
752
+ Q9649
753
+ Q43015
754
+ Q161439
755
+ Q937228
756
+ Q11427
757
+ Q2105114
758
+ Q1390
759
+ Q1107811
760
+ Q3966
761
+ Q14890
762
+ Q12162227
763
+ Q31948
764
+ Q366
765
+ Q11582
766
+ Q1937808
767
+ Q331769
768
+ Q532
769
+ Q79871
770
+ Q905151
771
+ Q7994710
772
+ Q18153484
773
+ Q11326182
774
+ Q15989253
775
+ Q106765930
776
+ Q2301186
777
+ Q779
778
+ Q76664785
779
+ Q157017
780
+ Q8016240
781
+ Q3345385
782
+ Q649
783
+ Q670
784
+ Q5994
785
+ Q159
786
+ Q192628
787
+ Q75520
788
+ Q2934
789
+ Q36814998
790
+ Q1297322
791
+ Q5003624
792
+ Q8928
793
+ Q193762
794
+ Q178692
795
+ Q208417
796
+ Q2028499
797
+ Q82955
798
+ Q1357761
799
+ Q194195
800
+ Q86135347
801
+ Q34679
802
+ Q13428690
803
+ Q16835776
804
+ Q1207302
805
+ Q501862
806
+ Q16528
807
+ Q131269
808
+ Q3314483
809
+ Q1107656
810
+ Q20665666
811
+ Q19973
812
+ Q899277
813
+ Q752392
814
+ Q223722
815
+ Q650711
816
+ Q197
817
+ Q1198887
818
+ Q41607
819
+ Q186263
820
+ Q283202
821
+ Q103312200
822
+ Q214609
823
+ Q104778027
824
+ Q132241
825
+ Q5283295
826
+ Q96326026
827
+ Q178559
828
+ Q34704992
829
+ Q111122056
830
+ Q1414816
831
+ Q184755
832
+ Q5249659
833
+ Q16952
834
+ Q2824657
835
+ Q60528728
836
+ Q5462027
837
+ Q1186710
838
+ Q51379512
839
+ Q35765
840
+ Q4
841
+ Q8805
842
+ Q65375807
843
+ Q6657015
844
+ Q107425
845
+ Q189838
846
+ Q5157576
847
+ Q29017603
848
+ Q79030196
849
+ Q194189
850
+ Q177749
851
+ Q333291
852
+ Q4895508
853
+ Q5926364
854
+ Q1083709
855
+ Q30092769
856
+ Q27898977
857
+ Q57598
858
+ Q37211094
859
+ Q6073879
860
+ Q201294
861
+ Q169545
862
+ Q31839438
863
+ Q12139612
864
+ Q22671
865
+ Q11019
866
+ Q1151752
867
+ Q468402
868
+ Q1550225
869
+ Q202251
870
+ Q222249
871
+ Q3367000
872
+ Q672
873
+ Q12204
874
+ Q3947
875
+ Q42527
876
+ Q6452640
877
+ Q47542
878
+ Q207326
879
+ Q83267
880
+ Q1056901
881
+ Q593676
882
+ Q606332
883
+ Q35473
884
+ Q44167
885
+ Q11796413
886
+ Q8492
887
+ Q38942
888
+ Q21491451
889
+ Q1503716
890
+ Q71003196
891
+ Q7884320
892
+ Q1503211
893
+ Q2425052
894
+ Q108163
895
+ Q1367
896
+ Q3984767
897
+ Q309100
898
+ Q167382
899
+ Q206021
900
+ Q58401
901
+ Q185598
902
+ Q366371
903
+ Q227936
904
+ Q622662
905
+ Q1865281
906
+ Q17039022
907
+ Q44104
908
+ Q27067578
909
+ Q107196737
910
+ Q210064
911
+ Q15343
912
+ Q542
913
+ Q1751429
914
+ Q245359
915
+ Q16254265
916
+ Q8331
917
+ Q13417200
918
+ Q110014844
919
+ Q152247
920
+ Q240313
921
+ Q3
922
+ Q57616098
923
+ Q1265533
924
+ Q668
925
+ Q43013
926
+ Q8094
927
+ Q326301
928
+ Q39074527
929
+ Q12483
930
+ Q6256
931
+ Q61022630
932
+ Q1990010
933
+ Q51036317
934
+ Q2517117
935
+ Q708514
936
+ Q1141231
937
+ Q203789
938
+ Q1913301
939
+ Q11379
940
+ Q1211427
941
+ Q229385
942
+ Q899625
943
+ Q36794
944
+ Q999646
945
+ Q894231
946
+ Q16001535
947
+ Q5281334
948
+ Q376
949
+ Q3010
950
+ Q13377687
951
+ Q60300035
952
+ Q5135520
953
+ Q780
954
+ Q134041
955
+ Q546191
956
+ Q13405640
957
+ Q7560
958
+ Q7868
959
+ Q8502
960
+ Q40050
961
+ Q207766
962
+ Q324120
963
+ Q35694
964
+ Q188748
965
+ Q19100
966
+ Q49776
967
+ Q244330
968
+ Q14384
969
+ Q186819
970
+ Q140
971
+ Q11285759
972
+ Q646426
973
+ Q3972943
974
+ Q20134
975
+ Q2984138
976
+ Q1384981
977
+ Q16321420
978
+ Q107197071
979
+ Q175185
980
+ Q887540
981
+ Q130693
982
+ Q49007
983
+ Q7991
984
+ Q430
985
+ Q605384
986
+ Q2734060
987
+ Q709099
988
+ Q3133
989
+ Q191552
990
+ Q740910
991
+ Q384593
992
+ Q192056
993
+ Q667
994
+ Q638
995
+ Q739302
996
+ Q16881915
997
+ Q177777
998
+ Q97027313
999
+ Q641
1000
+ Q19211
1001
+ Q283
1002
+ Q107196431
1003
+ Q131257
1004
+ Q63676707
1005
+ Q7802
1006
+ Q208450
1007
+ Q18035603
1008
+ Q56736680
1009
+ Q168432
1010
+ Q3241121
1011
+ Q112112091
1012
+ Q11723795
1013
+ Q25235
1014
+ Q110081157
1015
+ Q1187930
1016
+ Q192152
1017
+ Q2221893
1018
+ Q913999
1019
+ Q64572893
1020
+ Q68
1021
+ Q37097368
1022
+ Q2112073
1023
+ Q20077244
1024
+ Q3100542
1025
+ Q12731
1026
+ Q40178
1027
+ Q102496
1028
+ Q184197
1029
+ Q27135598
1030
+ Q223044
1031
+ Q12739
1032
+ Q728
1033
+ Q80083
1034
+ Q50824047
1035
+ Q13430821
1036
+ Q913
1037
+ Q31374404
1038
+ Q14660
1039
+ Q991202
1040
+ Q8686
1041
+ Q48428
1042
+ Q5638
1043
+ Q1321370
1044
+ Q12898216
1045
+ Q5410500
1046
+ Q2066131
1047
+ Q22261015
1048
+ Q170584
1049
+ Q1807128
1050
+ Q693464
1051
+ Q194433
1052
+ Q7239
1053
+ Q5078274
1054
+ Q169534
1055
+ Q140565
1056
+ Q1385033
1057
+ Q7239486
1058
+ Q652698
1059
+ Q1424519
1060
+ Q48885552
1061
+ Q11035
1062
+ Q201664
1063
+ Q1365641
1064
+ Q46988452
1065
+ Q133492
1066
+ Q2798912
1067
+ Q179448
1068
+ Q7364
1069
+ Q191067
1070
+ Q10978
1071
+ Q1121708
1072
+ Q8065
1073
+ Q11435
1074
+ Q128393
1075
+ Q849816
1076
+ Q175089
1077
+ Q2332346
1078
+ Q34442
1079
+ Q12192
1080
+ Q44142
1081
+ Q15686806
1082
+ Q15760439
1083
+ Q16157710
1084
+ Q1138737
1085
+ Q12202
1086
+ Q7540126
1087
+ Q211748
1088
+ Q290691
1089
+ Q11190
1090
+ Q157811
1091
+ Q81009
1092
+ Q2409
1093
+ Q870
1094
+ Q59618763
1095
+ Q679
1096
+ Q628674
1097
+ Q80228
1098
+ Q40089
1099
+ Q53865327
1100
+ Q33979
1101
+ Q2941721
1102
+ Q830393
1103
+ Q64374257
1104
+ Q7245047
1105
+ Q3031
1106
+ Q937
1107
+ Q6084258
1108
+ Q111029
1109
+ Q11957145
1110
+ Q79137673
1111
+ Q330284
1112
+ Q17052147
1113
+ Q7362
1114
+ Q477248
1115
+ Q784445
1116
+ Q17285
1117
+ Q110628325
1118
+ Q172
1119
+ Q31920
1120
+ Q476807
1121
+ Q18245643
1122
+ Q911076
1123
+ Q154558
1124
+ Q28472
1125
+ Q72128824
1126
+ Q17
1127
+ Q42177
1128
+ Q1069725
1129
+ Q102187260
1130
+ Q10884
1131
+ Q1932016
1132
+ Q34706
1133
+ Q111725985
1134
+ Q2003356
1135
+ Q152810
1136
+ Q184453
1137
+ Q77002510
1138
+ Q133182
1139
+ Q202064
1140
+ Q157002
1141
+ Q1339862
1142
+ Q1406070
1143
+ Q214169
1144
+ Q12916
1145
+ Q742168
1146
+ Q497903
1147
+ Q95566669
1148
+ Q180538
1149
+ Q66688988
1150
+ Q15814324
1151
+ Q1090
1152
+ Q929848
1153
+ Q15026
1154
+ Q483110
1155
+ Q4439
1156
+ Q134205
1157
+ Q482994
1158
+ Q39908
1159
+ Q698996
1160
+ Q8063
1161
+ Q918385
1162
+ Q3629144
1163
+ Q1154914
1164
+ Q3565037
1165
+ Q954087
1166
+ Q623
1167
+ Q8923
1168
+ Q15816392
1169
+ Q66571844
1170
+ Q16970
1171
+ Q1444
1172
+ Q191924
1173
+ Q149566
1174
+ Q44946
1175
+ Q1827
1176
+ Q28452346
1177
+ Q879844
1178
+ Q124282
1179
+ Q16529344
1180
+ Q421744
1181
+ Q4164344
1182
+ Q19939
1183
+ Q2102
1184
+ Q32489
1185
+ Q315247
1186
+ Q408
1187
+ Q64141914
1188
+ Q2303322
1189
+ Q889503
1190
+ Q3246832
1191
+ Q29710539
1192
+ Q2155636
1193
+ Q62128996
1194
+ Q27964852
1195
+ Q677
1196
+ Q3176558
1197
+ Q184421
1198
+ Q3210331
1199
+ Q16944487
1200
+ Q11004
1201
+ Q234901
1202
+ Q1403016
1203
+ Q34770
1204
+ Q43183
1205
+ Q11748378
1206
+ Q11880006
1207
+ Q190878
1208
+ Q2615500
1209
+ Q40956
1210
+ Q15310171
1211
+ Q691310
1212
+ Q489703
1213
+ Q16960397
1214
+ Q31207
1215
+ Q205555
1216
+ Q33999
1217
+ Q44299
1218
+ Q2392887
1219
+ Q82001
1220
+ Q192935
1221
+ Q81727
1222
+ Q217446
1223
+ Q147538
1224
+ Q15783
1225
+ Q107326062
1226
+ Q191118
1227
+ Q956
1228
+ Q273176
1229
+ Q5282225
1230
+ Q34749
1231
+ Q105549747
1232
+ Q3553322
1233
+ Q112230559
1234
+ Q5088855
1235
+ Q182060
1236
+ Q3769186
1237
+ Q6060613
1238
+ Q7397
1239
+ Q23852
1240
+ Q4918
1241
+ Q749316
1242
+ Q23069713
1243
+ Q729
1244
+ Q313549
1245
+ Q2248623
1246
+ Q8495
1247
+ Q600262
1248
+ Q29256
1249
+ Q89200784
1250
+ Q166231
1251
+ Q323808
1252
+ Q47107
1253
+ Q231250
1254
+ Q2249676
1255
+ Q160117
1256
+ Q104641
1257
+ Q1542661
1258
+ Q364
1259
+ Q527
1260
+ Q1125955
1261
+ Q476300
1262
+ Q217127
1263
+ Q43164
1264
+ Q1075
1265
+ Q106526
1266
+ Q69581
1267
+ Q2068602
1268
+ Q1323314
1269
+ Q102227442
1270
+ Q1371819
1271
+ Q373342
1272
+ Q8070
1273
+ Q3067815
1274
+ Q28007056
1275
+ Q102798
1276
+ Q170877
1277
+ Q96652354
1278
+ Q83405
1279
+ Q11196749
1280
+ Q42804
1281
+ Q11453
1282
+ Q3489000
1283
+ Q1155772
1284
+ Q137056
1285
+ Q8253
1286
+ Q10843872
1287
+ Q104493
1288
+ Q343546
1289
+ Q11081619
1290
+ Q107356532
1291
+ Q21004260
1292
+ Q11554381
1293
+ Q15724995
1294
+ Q827792
1295
+ Q170494
1296
+ Q12385831
1297
+ Q37681
1298
+ Q145
1299
+ Q316
1300
+ Q7017933
1301
+ Q39546
1302
+ Q7949
1303
+ Q2718084
1304
+ Q2225
1305
+ Q213753
1306
+ Q47496130
1307
+ Q3685258
1308
+ Q913572
1309
+ Q2
1310
+ Q666412
1311
+ Q61509
1312
+ Q81513
1313
+ Q48
1314
+ Q46913
1315
+ Q174782
1316
+ Q653318
1317
+ Q319899
1318
+ Q290716
1319
+ Q131524
1320
+ Q735
1321
+ Q32789
1322
+ Q2399307
1323
+ Q152088
1324
+ Q681515
1325
+ Q207690
1326
+ Q1410600
1327
+ Q13360264
1328
+ Q1053956
1329
+ Q110014643
1330
+ Q20817212
1331
+ Q12206
1332
+ Q272002
1333
+ Q639669
1334
+ Q193837
1335
+ Q7368
1336
+ Q179076
1337
+ Q177220
1338
+ Q17297777
1339
+ Q11235817
1340
+ Q18378865
1341
+ Q1192354
1342
+ Q8148
1343
+ Q6097
1344
+ Q2699803
1345
+ Q95685937
1346
+ Q11446
1347
+ Q10711575
1348
+ Q925
1349
+ Q18245166
1350
+ Q845069
1351
+ Q3389076
1352
+ Q7291
1353
+ Q1400734
1354
+ Q160464
1355
+ Q107519268
1356
+ Q3305213
1357
+ Q12047696
1358
+ Q189299
1359
+ Q70330436
1360
+ Q915366
1361
+ Q590111
1362
+ Q29051769
1363
+ Q2581003
1364
+ Q10841757
1365
+ Q50817452
1366
+ Q27499098
1367
+ Q2362573
1368
+ Q18123008
1369
+ Q83180
1370
+ Q7829561
1371
+ Q7944
1372
+ Q192296
1373
+ Q556
1374
+ Q33602
1375
+ Q23664
1376
+ Q93352
1377
+ Q64
1378
+ Q24384
1379
+ Q78948116
1380
+ Q11438
1381
+ Q110916833
1382
+ Q17517
1383
+ Q10987
1384
+ Q10670181
1385
+ Q483426
1386
+ Q306484
1387
+ Q309
1388
+ Q5454629
1389
+ Q1439311
1390
+ Q2159907
1391
+ Q9842
1392
+ Q3914
1393
+ Q118155
1394
+ Q7391
1395
+ Q167270
1396
+ Q842096
1397
+ Q40218
1398
+ Q35831
1399
+ Q4232578
1400
+ Q15116915
1401
+ Q17089828
1402
+ Q81025
1403
+ Q45354
1404
+ Q16502
1405
+ Q385378
1406
+ Q44497
1407
+ Q872
1408
+ Q1929383
1409
+ Q3499072
1410
+ Q15332388
1411
+ Q50030
1412
+ Q219174
1413
+ Q19625
1414
+ Q1342
1415
+ Q1760704
1416
+ Q924220
1417
+ Q7275
1418
+ Q12514
1419
+ Q46384
1420
+ Q102104911
1421
+ Q1165788
1422
+ Q12337193
1423
+ Q67932942
1424
+ Q221656
1425
+ Q14092
1426
+ Q66741662
1427
+ Q160289
1428
+ Q36602
1429
+ Q622988
1430
+ Q4311765
1431
+ Q3469818
1432
+ Q85574465
1433
+ Q1756348
1434
+ Q131186
1435
+ Q154166
1436
+ Q84426668
1437
+ Q3850736
1438
+ Q13099607
1439
+ Q663
1440
+ Q8021345
1441
+ Q45530615
1442
+ Q11424
1443
+ Q317158
1444
+ Q726306
1445
+ Q1864008
1446
+ Q41487
1447
+ Q39266
1448
+ Q12372598
1449
+ Q82580
1450
+ Q1207505
1451
+ Q638608
1452
+ Q23390
1453
+ Q11016
1454
+ Q2207370
1455
+ Q131790
1456
+ Q1153484
1457
+ Q245117
1458
+ Q161736
1459
+ Q51974
1460
+ Q1975652
1461
+ Q182940
1462
+ Q920915
1463
+ Q76299
1464
+ Q108
1465
+ Q2314
1466
+ Q21201
1467
+ Q758780
1468
+ Q17428119
1469
+ Q1921606
1470
+ Q2135977
1471
+ Q349
1472
+ Q13317
1473
+ Q9200127
1474
+ Q30456678
1475
+ Q718113
1476
+ Q9614
1477
+ Q61480
1478
+ Q822282
1479
+ Q1581308
1480
+ Q2334804
1481
+ Q153988
1482
+ Q80071
1483
+ Q1299714
1484
+ Q37537637
1485
+ Q1470363
1486
+ Q920312
1487
+ Q179157
1488
+ Q455550
1489
+ Q42962
1490
+ Q11663
1491
+ Q8
1492
+ Q35872
1493
+ Q232191
1494
+ Q235544
1495
+ Q9081
1496
+ Q58968
1497
+ Q178193
1498
+ Q7313
1499
+ Q79701
1500
+ Q335101
1501
+ Q573
1502
+ Q1385709
1503
+ Q69693864
1504
+ Q9730
1505
+ Q189004
1506
+ Q12121859
1507
+ Q50386808
1508
+ Q334600
1509
+ Q23640
1510
+ Q4575936
1511
+ Q108566
1512
+ Q3304003
1513
+ Q143828
1514
+ Q1196129
1515
+ Q108325
1516
+ Q2132510
1517
+ Q726
1518
+ Q9387
1519
+ Q18250984
1520
+ Q23915873
1521
+ Q127771
1522
+ Q207694
1523
+ Q1871151
1524
+ Q4010255
1525
+ Q851415
1526
+ Q176483
1527
+ Q1097859
1528
+ Q80294
1529
+ Q217541
1530
+ Q7987
1531
+ Q35666
1532
+ Q99427841
1533
+ Q11570
1534
+ Q1437299
1535
+ Q2238184
1536
+ Q3695082
1537
+ Q46847
1538
+ Q1940914
1539
+ Q53716741
1540
+ Q64365
1541
+ Q11002
1542
+ Q9415
1543
+ Q28128222
1544
+ Q178805
1545
+ Q273318
1546
+ Q2416562
1547
+ Q81163
1548
+ Q5574826
1549
+ Q787
1550
+ Q204
1551
+ Q1554231
1552
+ Q17598913
1553
+ Q165044
1554
+ Q3142
1555
+ Q11457
1556
+ Q4991371
1557
+ Q4683829
1558
+ Q215627
1559
+ Q7434
1560
+ Q593644
1561
+ Q37262525
1562
+ Q11946202
1563
+ Q8425
1564
+ Q1127306
1565
+ Q180470
1566
+ Q61882239
1567
+ Q38867
1568
+ Q8171
1569
+ Q47146337
1570
+ Q2160801
1571
+ Q161210
1572
+ Q25212061
1573
+ Q235356
1574
+ Q12147
1575
+ Q216059
1576
+ Q5090
1577
+ Q89198120
1578
+ Q2116228
1579
+ Q858485
1580
+ Q39558
1581
+ Q336
1582
+ Q107383235
1583
+ Q265158
1584
+ Q16281769
1585
+ Q1144286
1586
+ Q34604447
1587
+ Q11442
1588
+ Q194105
1589
+ Q27334842
1590
+ Q83310
1591
+ Q93727271
1592
+ Q929
1593
+ Q4593291
1594
+ Q11024
1595
+ Q1569314
1596
+ Q1199715
1597
+ Q7590
1598
+ Q42861
1599
+ Q11020
1600
+ Q9779
1601
+ Q2357358
1602
+ Q12453
1603
+ Q42622
1604
+ Q7692360
1605
+ Q11423
1606
+ Q1616075
1607
+ Q2141879
1608
+ Q65088609
1609
+ Q1117001
1610
+ Q657449
1611
+ Q171076
1612
+ Q16
1613
+ Q332798
1614
+ Q11781028
1615
+ Q19269277
1616
+ Q108840195
1617
+ Q6000365
1618
+ Q276173
1619
+ Q3561314
1620
+ Q21075684
1621
+ Q30013662
1622
+ Q8161
1623
+ Q503968
1624
+ Q1420
1625
+ Q764
1626
+ Q1318054
1627
+ Q2095
1628
+ Q1185607
1629
+ Q37707
1630
+ Q3074482
1631
+ Q2811
1632
+ Q865564
1633
+ Q1144928
1634
+ Q21096985
1635
+ Q36539
1636
+ Q2570370
1637
+ Q5849500
1638
+ Q93189
1639
+ Q131841
1640
+ Q7504315
1641
+ Q27813916
1642
+ Q36247
1643
+ Q180516
1644
+ Q10850
1645
+ Q55691459
1646
+ Q187934
1647
+ Q60346683
1648
+ Q13377214
1649
+ Q587735
1650
+ Q168658
1651
+ Q18785956
1652
+ Q131154
1653
+ Q1249178
1654
+ Q1088223
1655
+ Q2750057
1656
+ Q40348
1657
+ Q715284
1658
+ Q204664
1659
+ Q35535
1660
+ Q167987
1661
+ Q43193
1662
+ Q465570
1663
+ Q280658
1664
+ Q169940
1665
+ Q7430330
1666
+ Q60960
1667
+ Q1889932
1668
+ Q169207
1669
+ Q19801756
1670
+ Q188507
1671
+ Q571
1672
+ Q1150771
1673
+ Q58734
1674
+ Q170579
1675
+ Q89
1676
+ Q4323994
1677
+ Q369012
1678
+ Q467
1679
+ Q215380
1680
+ Q22731
1681
+ Q11460
1682
+ Q1355
1683
+ Q758
1684
+ Q36611
1685
+ Q4126704
1686
+ Q8068
1687
+ Q1313
1688
+ Q10513727
1689
+ Q146701
1690
+ Q10989264
1691
+ Q473194
1692
+ Q739702
1693
+ Q9612
1694
+ Q3915542
1695
+ Q185684
1696
+ Q12779002
1697
+ Q1527264
1698
+ Q41050
1699
+ Q1267889
1700
+ Q35102
1701
+ Q60168
1702
+ Q49638525
1703
+ Q254651
1704
+ Q464980
1705
+ Q11412
1706
+ Q75
1707
+ Q181036
1708
+ Q137823
1709
+ Q3332814
1710
+ Q102231
1711
+ Q11812678
1712
+ Q41803
1713
+ Q10273457
1714
+ Q468756
1715
+ Q10494269
1716
+ Q2048319
1717
+ Q899192
1718
+ Q272870
1719
+ Q15645384
1720
+ Q16963
1721
+ Q12136
1722
+ Q1406
1723
+ Q144
1724
+ Q775343
1725
+ Q420
1726
+ Q110714168
1727
+ Q79030284
1728
+ Q37260
1729
+ Q188628
1730
+ Q132
1731
+ Q28451497
1732
+ Q8441
1733
+ Q28922
1734
+ Q3102631
1735
+ Q106980799
1736
+ Q352842
1737
+ Q3711329
1738
+ Q8142
1739
+ Q356031
1740
+ Q23392
1741
+ Q23229
1742
+ Q1800324
1743
+ Q1360926
1744
+ Q861951
1745
+ Q1884224
1746
+ Q1860
1747
+ Q30612297
1748
+ Q7001368
1749
+ Q3130
1750
+ Q1566584
1751
+ Q2920921
1752
+ Q96633923
1753
+ Q202875
1754
+ Q1904
1755
+ Q63485459
1756
+ Q15751090
1757
+ Q897
1758
+ Q1103
1759
+ Q484083
1760
+ Q11106
1761
+ Q211382
1762
+ Q1377840
1763
+ Q45761
1764
+ Q875937
1765
+ Q313
1766
+ Q275038
1767
+ Q318028
1768
+ Q1329012
1769
+ Q165
1770
+ Q219469
1771
+ Q195
1772
+ Q22471755
1773
+ Q12503
1774
+ Q49845
1775
+ Q233894
1776
+ Q875696
1777
+ Q1499786
1778
+ Q1165721
1779
+ Q27198272
1780
+ Q7075
1781
+ Q3980888
1782
+ Q659
1783
+ Q989963
1784
+ Q432449
1785
+ Q207645
1786
+ Q184194
1787
+ Q193788
1788
+ Q23442
1789
+ Q162455
1790
+ Q503
1791
+ Q47528
1792
+ Q11404
1793
+ Q1456832
1794
+ Q12772819
1795
+ Q20888800
1796
+ Q4970
1797
+ Q134768
1798
+ Q1498
1799
+ Q162247
1800
+ Q41354
1801
+ Q1030213
1802
+ Q4026292
1803
+ Q7169333
1804
+ Q1391831
1805
+ Q22698
1806
+ Q43
1807
+ Q121176
1808
+ Q170079
1809
+ Q842346
1810
+ Q3545708
1811
+ Q327055
1812
+ Q47817360
1813
+ Q9135
1814
+ Q35661296
1815
+ Q1072166
1816
+ Q457931
1817
+ Q1305415
1818
+ Q215302
1819
+ Q23397
1820
+ Q270952
1821
+ Q187456
1822
+ Q110583109
1823
+ Q193934
1824
+ Q4689421
1825
+ Q5477756
1826
+ Q36133
1827
+ Q276258
1828
+ Q855220
1829
+ Q544
1830
+ Q8201
1831
+ Q4344358
1832
+ Q223393
1833
+ Q1430731
1834
+ Q127980
1835
+ Q483247
1836
+ Q219160
1837
+ Q107724490
1838
+ Q8054
1839
+ Q2092297
1840
+ Q28865
1841
+ Q209542
1842
+ Q17107707
1843
+ Q318
1844
+ Q3966720
1845
+ Q7565
1846
+ Q43365
1847
+ Q134566
1848
+ Q6881511
1849
+ Q949930
1850
+ Q11391
1851
+ Q768575
1852
+ Q27910497
1853
+ Q659600
1854
+ Q862867
1855
+ Q23835475
1856
+ Q26376690
1857
+ Q13539073
1858
+ Q106080
1859
+ Q2325497
1860
+ Q366134
1861
+ Q6685124
1862
+ Q11822
1863
+ Q106767198
1864
+ Q163759
1865
+ Q42501
1866
+ Q57495609
1867
+ Q79478214
1868
+ Q2923673
1869
+ Q815758
1870
+ Q211503
1871
+ Q3918
1872
+ Q112123596
1873
+ Q747713
1874
+ Q23413
1875
+ Q40231
1876
+ Q29572836
1877
+ Q29558624
1878
+ Q1324197
1879
+ Q19829510
1880
+ Q832237
1881
+ Q14083
1882
+ Q5419647
1883
+ Q178026
1884
+ Q4698686
1885
+ Q150425
1886
+ Q832451
1887
+ Q5043
1888
+ Q190578
1889
+ Q816345
1890
+ Q37287750
1891
+ Q98929991
1892
+ Q943
1893
+ Q192199
1894
+ Q107998880
1895
+ Q657221
1896
+ Q1175042
1897
+ Q5113
1898
+ Q31805992
1899
+ Q844482
1900
+ Q271960
1901
+ Q2908608
1902
+ Q2472587
1903
+ Q1758905
1904
+ Q178748
1905
+ Q1139344
1906
+ Q16751793
1907
+ Q142269
1908
+ Q133772
1909
+ Q367293
1910
+ Q107412
1911
+ Q378681
1912
+ Q184395
1913
+ Q1758354
1914
+ Q718
1915
+ Q190429
1916
+ Q12859788
1917
+ Q11436
1918
+ Q2809148
1919
+ Q12705
1920
+ Q15174
1921
+ Q42998
1922
+ Q845329
1923
+ Q13276
1924
+ Q2735883
1925
+ Q1036729
1926
+ Q699612
1927
+ Q11982
1928
+ Q12861
1929
+ Q1141116
1930
+ Q53865454
1931
+ Q14748
1932
+ Q66571843
1933
+ Q194425
1934
+ Q65386997
1935
+ Q48422
1936
+ Q110551902
1937
+ Q281928
1938
+ Q12501
1939
+ Q17205
1940
+ Q59668787
1941
+ Q19862215
1942
+ Q10971235
1943
+ Q1549506
1944
+ Q27540169
1945
+ Q798505
1946
+ Q1097498
1947
+ Q622821
1948
+ Q703534
1949
+ Q105985
1950
+ Q182925
1951
+ Q1292038
1952
+ Q1153773
1953
+ Q60142
1954
+ Q1643184
1955
+ Q315
1956
+ Q183
1957
+ Q207174
1958
+ Q1266946
1959
+ Q7220961
1960
+ Q104763414
1961
+ Q7632586
1962
+ Q5604190
1963
+ Q37456277
1964
+ Q60300628
1965
+ Q11417
1966
+ Q3543591
1967
+ Q193472
1968
+ Q7605452
1969
+ Q28755323
1970
+ Q2637814
1971
+ Q483242
1972
+ Q37754875
1973
+ Q640500
1974
+ Q5516863
1975
+ Q19861552
1976
+ Q124072
1977
+ Q515
1978
+ Q97849650
1979
+ Q4167836
1980
+ Q212758
1981
+ Q43483
1982
+ Q27650233
1983
+ Q37726
1984
+ Q830
1985
+ Q713200
1986
+ Q194428
1987
+ Q11421
1988
+ Q1368665
1989
+ Q204077
1990
+ Q178651
1991
+ Q111653583
1992
+ Q1
1993
+ Q5881191
1994
+ Q6081679
1995
+ Q11472
1996
+ Q177378
1997
+ Q190672
1998
+ Q131514
1999
+ Q14536140
2000
+ Q285726
2001
+ Q113558
2002
+ Q1900326
2003
+ Q2957747
2004
+ Q11060274
2005
+ Q6499736
2006
+ Q41534
2007
+ Q2144962
2008
+ Q15175
2009
+ Q169251
2010
+ Q46944820
2011
+ Q471898
2012
+ Q56429795
2013
+ Q2138622
2014
+ Q287
2015
+ Q2998430
2016
+ Q362872
2017
+ Q16666
2018
+ Q80079
2019
+ Q174165
2020
+ Q127418
2021
+ Q69946132
2022
+ Q24489
2023
+ Q557945
2024
+ Q169470
2025
+ Q890886
2026
+ Q161179
2027
+ Q36465
2028
+ Q413
2029
+ Q2987425
2030
+ Q6817227
2031
+ Q854429
2032
+ Q7925
2033
+ Q5320
2034
+ Q1093742
2035
+ Q7969566
2036
+ Q133105
2037
+ Q123120
2038
+ Q488934
2039
+ Q202071
2040
+ Q757554
2041
+ Q3478630
2042
+ Q37226
2043
+ Q165328
2044
+ Q3695508
2045
+ Q3105497
2046
+ Q708
2047
+ Q28575
2048
+ Q988343
2049
+ Q212238
2050
+ Q124714
2051
+ Q23445
2052
+ Q11408
2053
+ Q2594281
2054
+ Q987767
2055
+ Q309035
2056
+ Q179661
2057
+ Q3077570
2058
+ Q30093000
2059
+ Q487623
2060
+ Q7946
2061
+ Q3736439
2062
+ Q152024
2063
+ Q1865430
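
The file above holds one Wikidata QID per line; app.py reads it with load_text and uses it to filter ent2description into analogy_ent2token, whose token ids (analogy_entity_ids) are the only candidates scored at the [MASK] position. A small sanity-check sketch, hypothetical and not part of the commit, confirming that the MarKG name/description files cover every analogy entity, which the inference code assumes when it looks up ent2description[...] and ent2text[...]:

# Hypothetical consistency check across the MarKG/MARS files used by app.py.
analogy_entities = load_text('./dataset/MARS/analogy_entities.txt')
ent2text = load_dict_text('./dataset/MarKG/entity2text.txt')
ent2description = load_dict_text('./dataset/MarKG/entity2textlong.txt')

missing = [qid for qid in analogy_entities
           if qid not in ent2text or qid not in ent2description]
print(f"{len(analogy_entities)} analogy entities, {len(missing)} missing a name or description")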
dataset/MARS/analogy_entity_to_wiki_qid.txt ADDED
@@ -0,0 +1,2411 @@
1
+ mall Q219469
2
+ internal Q66571844
3
+ disputes Q180684
4
+ seeders Q111725985
5
+ rayon Q910194
6
+ podium Q60142
7
+ fanatism Q39074527
8
+ adore Q366371
9
+ sweets Q2464807
10
+ occupied Q20723554
11
+ postcard Q192425
12
+ elephant Q7378
13
+ route Q1051976
14
+ mathematician Q170790
15
+ antiques Q472760
16
+ writ Q1001059
17
+ armchair Q11285759
18
+ tonne Q191118
19
+ shipping Q1937808
20
+ success Q7632586
21
+ energy saving Q380170
22
+ rain Q7925
23
+ plan Q1371819
24
+ negotiation Q202875
25
+ seawater Q184395
26
+ organ Q1444
27
+ ritual vessel Q17379796
28
+ cucumber Q2735883
29
+ rice paper Q1066536
30
+ buoyancy Q6497624
31
+ explore Q97849650
32
+ chinese painting Q919348
33
+ personification Q207174
34
+ christian Q18001597
35
+ zip Q204
36
+ tongue Q9614
37
+ landfill Q152810
38
+ rational Q102104911
39
+ wind Q8094
40
+ creek Q1437299
41
+ bear market Q2486664
42
+ sound waves Q3629144
43
+ telephone Q11035
44
+ cake Q13276
45
+ top Q1435365
46
+ chlorophyll Q43177
47
+ pine Q1503211
48
+ desert Q8514
49
+ woods Q4421
50
+ villain Q290691
51
+ orchestration Q3367000
52
+ pepper Q201959
53
+ building material Q206615
54
+ competent Q1780863
55
+ bedding Q1762457
56
+ dirty Q107998873
57
+ famine Q168247
58
+ lion group Q6555422
59
+ small Q24245823
60
+ burger Q6663
61
+ civil servants Q63485459
62
+ fast Q191924
63
+ sauerkraut Q154166
64
+ word Q8171
65
+ child Q7569
66
+ short Q106526
67
+ petal Q107412
68
+ defense Q1985622
69
+ hotel Q27686
70
+ table Q14748
71
+ peach Q13189
72
+ outside Q30014662
73
+ children Q21075684
74
+ community Q177634
75
+ violin Q8355
76
+ law Q7748
77
+ cinema Q11424
78
+ preamble to the constitution Q3345385
79
+ petroleum Q22656
80
+ performer Q713200
81
+ hairnet Q1566593
82
+ restaurant Q11707
83
+ nomad Q128393
84
+ spring Q124714
85
+ hatred Q160232
86
+ region Q82794
87
+ abundant Q223722
88
+ mortal Q65940913
89
+ motor train Q65375807
90
+ rational numbers Q47005650
91
+ formulate Q57650068
92
+ omelette Q20129
93
+ step Q53996838
94
+ society Q8425
95
+ conductive core Q37384866
96
+ sunny Q233894
97
+ rectangle Q209
98
+ sue Q16281769
99
+ clear Q306484
100
+ zipper Q101761
101
+ world Q16502
102
+ accident Q171558
103
+ toiletry Q3246832
104
+ food poisoning Q272002
105
+ pulp Q1470757
106
+ zoo Q43501
107
+ positive number Q3176558
108
+ cure Q1595418
109
+ logical Q364
110
+ new york city Q60
111
+ electrical appliances Q60300628
112
+ rest Q879844
113
+ news Q38926
114
+ pause Q17562059
115
+ hospitalization Q3140971
116
+ car Q929
117
+ weather Q11663
118
+ carriage Q235356
119
+ coal Q24489
120
+ cutlery Q81944
121
+ penalty Q2068602
122
+ quality control Q827792
123
+ hurdles Q80024808
124
+ preview Q1324197
125
+ integrity Q1329012
126
+ castle Q23413
127
+ museums Q207694
128
+ solar energy Q40015
129
+ comb Q23834
130
+ mom Q7560
131
+ beef Q192628
132
+ graduate student Q18245166
133
+ completion Q3685258
134
+ jaguar Q35694
135
+ hydrochloric acid Q2409
136
+ province Q34876
137
+ wedding ring Q842096
138
+ traffic Q4323994
139
+ actor Q33999
140
+ market Q37654
141
+ sedan Q190578
142
+ reward and punishment Q33943815
143
+ harvester Q4229435
144
+ tissue Q40397
145
+ young Q845069
146
+ cell Q7868
147
+ soy Q11006
148
+ mesh Q691310
149
+ faithful Q1755355
150
+ subordinate Q78070424
151
+ judges Q81240
152
+ drizzle Q211521
153
+ stanza Q1756348
154
+ antarctica Q51
155
+ press conference Q272281
156
+ rattlesnake Q10282840
157
+ dinosaur Q430
158
+ wall Q42948
159
+ west Q679
160
+ enterprise Q6881511
161
+ crane Q178692
162
+ field trip Q8013033
163
+ historical site Q27916659
164
+ bar Q187456
165
+ clean energy Q4116742
166
+ lychee Q105428070
167
+ nature Q7860
168
+ speaker Q1758037
169
+ panda Q3850736
170
+ corrosion Q137056
171
+ spaceship Q3553322
172
+ real number Q12916
173
+ desktop Q9127910
174
+ additives Q4681350
175
+ culture Q11042
176
+ programs Q85574465
177
+ package Q1995545
178
+ promotion Q574433
179
+ walk Q2236563
180
+ concept Q151885
181
+ fax Q132744
182
+ beard Q42804
183
+ keep warm Q107008077
184
+ pancake Q44541
185
+ united kingdom Q145
186
+ network Q1900326
187
+ kitchenware Q1521410
188
+ test Q27318
189
+ slow Q628674
190
+ nails Q36864
191
+ gala Q200538
192
+ harmony Q184421
193
+ graduate students Q103838588
194
+ evacuation Q606332
195
+ poultry Q178559
196
+ poems Q5185279
197
+ anaconda Q3306164
198
+ security check Q11898832
199
+ gauze Q618710
200
+ human Q5
201
+ trial Q8016240
202
+ species extinction Q123509
203
+ the earth Q2
204
+ china Q148
205
+ popcorn Q924220
206
+ time Q11471
207
+ altar Q101687
208
+ potatoes Q7235103
209
+ filename Q1144928
210
+ suzhou Q42622
211
+ peach pit Q27650233
212
+ backwardness Q2362573
213
+ rise Q1139344
214
+ blue Q1088
215
+ guilty Q705178
216
+ perch Q600262
217
+ muddy Q2581003
218
+ anti-epidemic Q102227442
219
+ statistics Q12483
220
+ nitrogen Q627
221
+ fail Q1121708
222
+ sand painting Q1439311
223
+ refrigerator Q37828
224
+ meal Q6460735
225
+ peasant Q838811
226
+ process Q10843872
227
+ fire Q3196
228
+ laptop Q3962
229
+ money Q1368
230
+ virtue Q157811
231
+ dishwasher Q186263
232
+ thundercloud Q37319371
233
+ athlete Q2066131
234
+ jeans Q83363
235
+ heating Q11880006
236
+ primate Q7380
237
+ truck Q43193
238
+ knowing Q9081
239
+ peking university Q16952
240
+ ears Q7362
241
+ scholar Q2248623
242
+ plateau Q75520
243
+ christianity Q5043
244
+ pig iron Q901785
245
+ goat Q2934
246
+ pale Q593676
247
+ scared Q44619
248
+ soybean Q61503220
249
+ envy Q188739
250
+ station Q12819564
251
+ sunglasses Q217541
252
+ staff Q6813432
253
+ wechat Q283233
254
+ permission Q7169333
255
+ aid Q23835475
256
+ soil erosion Q889503
257
+ terrified Q3984767
258
+ lobby Q31948
259
+ torso Q160695
260
+ soldier Q4991371
261
+ mediterranean sea Q986177
262
+ celebration Q3010205
263
+ silicosis Q653318
264
+ aeroplane Q197
265
+ flute Q11405
266
+ data Q42848
267
+ criminal Q2159907
268
+ rhetoric Q81009
269
+ street Q79007
270
+ boring Q3210331
271
+ rotten Q49638525
272
+ lighting Q210064
273
+ chair Q15026
274
+ sheep Q7368
275
+ building Q41176
276
+ women Q467
277
+ after sales Q4690798
278
+ guide dogs Q38782
279
+ hebei university Q835731
280
+ utensils Q110014643
281
+ hair Q28472
282
+ jiangxi Q57052
283
+ cross talk Q1943920
284
+ talent Q190139
285
+ japan Q17
286
+ literary genre Q223393
287
+ leather shoes Q71003196
288
+ cervix Q666412
289
+ tires Q169545
290
+ kitten Q147
291
+ vaccines Q134808
292
+ ask Q918257
293
+ dessert Q182940
294
+ mercury Q925
295
+ magnet Q11421
296
+ plow Q47459710
297
+ cable Q646462
298
+ atmosphere Q8104
299
+ eyes Q7364
300
+ ducks Q17319881
301
+ worship Q2514663
302
+ forensics Q495304
303
+ break up Q833566
304
+ quality Q1207505
305
+ primary school Q9842
306
+ cruise Q2063640
307
+ express Q610190
308
+ averse Q60528728
309
+ revising Q102397207
310
+ beauty Q7242
311
+ steering wheel Q679300
312
+ partner Q7140693
313
+ nursing Q121176
314
+ beautiful Q813449
315
+ landmine Q178795
316
+ suv Q192152
317
+ chapel Q108325
318
+ triceratops Q14384
319
+ posterior Q2132510
320
+ lethargic Q15729017
321
+ retreat Q1760704
322
+ sulfur dioxide Q5282
323
+ parade Q657449
324
+ lamps Q1138737
325
+ real estate Q10494269
326
+ incentive Q1414816
327
+ tribe Q227936
328
+ attorney Q40348
329
+ buying Q96652354
330
+ hebei Q21208
331
+ north Q659
332
+ chuckle Q170579
333
+ cabin Q3649129
334
+ short stories Q2357358
335
+ reverse Q1542661
336
+ gaggle Q5516863
337
+ pittsburgh Q1342
338
+ coyote Q44299
339
+ measurement Q12453
340
+ sweet cake Q31808687
341
+ mammal Q7377
342
+ qinghai lake Q201294
343
+ doorknob Q23009
344
+ sofa Q131514
345
+ mouse Q7987
346
+ velociraptor Q14403
347
+ suits Q370185
348
+ senator Q15686806
349
+ pedals Q27540169
350
+ eggs Q1760818
351
+ emergencies Q93727271
352
+ hard Q699612
353
+ club Q988108
354
+ blitzkrieg Q162247
355
+ rainstorm Q65510617
356
+ chilly Q270952
357
+ essential Q30093000
358
+ suspension Q272870
359
+ teeth Q19861552
360
+ deluge Q134052
361
+ computer room Q5157576
362
+ live Q165714
363
+ gorilla Q36611
364
+ roof Q83180
365
+ resource Q1554231
366
+ drum Q11404
367
+ chest Q366134
368
+ good wine Q112230559
369
+ harvest Q213753
370
+ big Q179157
371
+ snub Q18378865
372
+ hoe Q131154
373
+ airplane Q197
374
+ environmental protection Q832237
375
+ blender Q501862
376
+ laser Q38867
377
+ plants Q756
378
+ intellectuals Q58968
379
+ chap Q16875619
380
+ mechanics Q41217
381
+ jewelry Q161439
382
+ swallow Q1401371
383
+ bicycle Q11442
384
+ mixer Q954087
385
+ captain Q19100
386
+ payment Q1148747
387
+ doors and windows Q16835776
388
+ principal Q7245047
389
+ chinese Q7850
390
+ drugs Q8386
391
+ playing Q110458661
392
+ diabetes Q12206
393
+ veto Q65474590
394
+ equilateral triangle Q157002
395
+ flavoring Q4173974
396
+ hawk Q846664
397
+ dogs Q144
398
+ doors Q45354
399
+ natural science Q7991
400
+ parallelogram Q45867
401
+ band Q215380
402
+ comics Q1004
403
+ asia Q48
404
+ teapot Q245005
405
+ fruit wine Q633625
406
+ ballot Q905151
407
+ management Q2920921
408
+ income Q1527264
409
+ jewel Q83437
410
+ creativity Q170658
411
+ knife Q32489
412
+ upward Q4010255
413
+ fish tail Q107246611
414
+ household appliances Q212920
415
+ plot Q1758354
416
+ evaluate Q63013258
417
+ cool Q687900
418
+ after reading Q13428690
419
+ light Q9128
420
+ economic crisis Q290178
421
+ sword Q12791
422
+ laborer Q19862215
423
+ software Q7397
424
+ dead sea Q23883
425
+ performance Q35140
426
+ bag Q1323314
427
+ dynamic Q113558
428
+ flour Q36465
429
+ lexicon Q8096
430
+ wither Q2594281
431
+ headphones Q186819
432
+ raptor Q48428
433
+ ladder Q168639
434
+ denim Q652698
435
+ art Q735
436
+ architecture Q12271
437
+ immediately Q6004788
438
+ tools Q89198120
439
+ pontoon Q693340
440
+ viscera Q546191
441
+ writing Q37260
442
+ earthquake Q7944
443
+ express delivery Q57616098
444
+ calcium hydroxide Q182849
445
+ justice Q13189320
446
+ root carving Q1030213
447
+ egg Q93189
448
+ pad Q378681
449
+ interior Q2998430
450
+ perfume Q131746
451
+ bright Q221656
452
+ shijiazhuang Q58401
453
+ supervision Q1175042
454
+ hard disk Q4439
455
+ australia Q408
456
+ rare Q58157328
457
+ oil paint Q296955
458
+ newspaper Q11032
459
+ wolf Q3711329
460
+ scene Q1185607
461
+ amusing Q2916374
462
+ research and develop Q276099
463
+ dead Q48422
464
+ supplies Q47462850
465
+ nation Q6266
466
+ measure blood pressure Q95422148
467
+ scull Q18785956
468
+ thermometer Q646
469
+ siege Q188055
470
+ chain bridge Q10513727
471
+ communication Q11024
472
+ transport Q7590
473
+ students Q48282
474
+ puncture Q3396153
475
+ osaka Q35765
476
+ beer Q44
477
+ under Q15332388
478
+ lift Q194433
479
+ southwest Q2381698
480
+ pain Q81938
481
+ hire Q37540140
482
+ stapler Q489157
483
+ board Q865588
484
+ hesitating Q12348865
485
+ clothes Q11460
486
+ persistence Q368671
487
+ mooring Q587735
488
+ capital Q5119
489
+ english Q1860
490
+ calendar Q12132
491
+ skirt Q2160801
492
+ colleagues Q4227944
493
+ illegal act Q1456832
494
+ jog Q1430731
495
+ sow Q37148043
496
+ shore Q468756
497
+ diatoms Q61882239
498
+ sorrow Q2809148
499
+ peking opera Q335101
500
+ customs Q367293
501
+ personnel Q105764136
502
+ story Q831691
503
+ middle school student Q16003532
504
+ ideal Q840396
505
+ conductive Q1581308
506
+ france Q142
507
+ starting Q106476041
508
+ water bottle Q6817227
509
+ village Q532
510
+ proposition Q108163
511
+ modern Q20738981
512
+ medicine Q11190
513
+ school Q3914
514
+ heat insulation Q39456562
515
+ modeling software Q112123596
516
+ courage Q208160
517
+ familiar Q386905
518
+ dry ice Q194306
519
+ white pigeon Q2028499
520
+ police station Q861951
521
+ saltpeter Q12970360
522
+ couch Q131514
523
+ jade Q175089
524
+ apartment Q188507
525
+ filament Q2365301
526
+ popular music Q373342
527
+ polo Q134211
528
+ soda Q147538
529
+ the usa Q30
530
+ sunlight Q193788
531
+ python Q28865
532
+ fire extinguisher Q190672
533
+ proverbs Q35102
534
+ singer Q177220
535
+ fishing Q14373
536
+ antelope Q25894
537
+ ferry Q25653
538
+ dried fruit Q234901
539
+ registration Q2399307
540
+ crisps Q109121530
541
+ greening Q5604190
542
+ gym Q14092
543
+ politician Q82955
544
+ litigation Q107364261
545
+ blade Q330946
546
+ citrus Q81513
547
+ fast food restaurant Q1751429
548
+ bun Q13377687
549
+ emotions Q9415
550
+ media Q340169
551
+ like Q111653583
552
+ feathers Q81025
553
+ mobility Q96622169
554
+ opera Q1344
555
+ bowl Q153988
556
+ microphone Q46384
557
+ river Q4022
558
+ canteen Q1127296
559
+ ruler Q1097498
560
+ date pit Q91209086
561
+ first Q19269277
562
+ entrance Q1137365
563
+ production Q739302
564
+ bamboo flute Q54820129
565
+ exposure Q271960
566
+ climber Q86135347
567
+ display Q778384
568
+ fireworks Q127933
569
+ inferiority Q21171262
570
+ eliminate Q3051005
571
+ pillow Q99895
572
+ lullaby Q193605
573
+ white jade Q2221893
574
+ panic Q208450
575
+ grassland Q1006733
576
+ student union Q877998
577
+ style Q1292119
578
+ inhale Q832451
579
+ tool Q39546
580
+ pigeon Q2984138
581
+ chestnut Q1406070
582
+ bark Q184453
583
+ tactics Q207645
584
+ stewardess Q12337193
585
+ asphalt Q202251
586
+ strap Q3217573
587
+ neutron Q2348
588
+ state Q7275
589
+ zinc Q758
590
+ price increase Q7242589
591
+ orchestra Q42998
592
+ food expenditure Q39982873
593
+ logistics Q177777
594
+ material Q214609
595
+ deer Q23390
596
+ wealthy Q7978035
597
+ white cloud Q2517117
598
+ sea water Q184395
599
+ cherry Q196
600
+ orchid Q142269
601
+ pencil Q14674
602
+ cement Q45190
603
+ crave Q63677013
604
+ pants Q39908
605
+ racing horse Q53716741
606
+ seafront Q63676707
607
+ the ozone layer Q35828165
608
+ hair clip Q1566584
609
+ chaohu Q855220
610
+ member Q9200127
611
+ feed Q2095
612
+ scream Q1932016
613
+ camcorder Q335234
614
+ swarm Q1199715
615
+ over Q1210093
616
+ president Q30461
617
+ monument Q4989906
618
+ pork Q191768
619
+ chimpanzee Q4126704
620
+ palace Q16560
621
+ natural disaster Q8065
622
+ porcupine Q1223829
623
+ farmhouse Q489357
624
+ photosynthesis Q11982
625
+ banana Q503
626
+ tsunami Q8070
627
+ painting Q3305213
628
+ juicer Q12334336
629
+ bonfire Q9505308
630
+ building a house Q77073841
631
+ stationery Q875696
632
+ cocoa Q45912917
633
+ ocean Q9430
634
+ oven Q36539
635
+ soy milk Q192199
636
+ rifle Q124072
637
+ drama Q25372
638
+ soymilk Q192199
639
+ memorial Q5003624
640
+ window glass Q3561314
641
+ additive Q350176
642
+ monochrome Q11231895
643
+ folk art Q1153484
644
+ bonsai Q64365
645
+ fine Q1243001
646
+ false Q5432619
647
+ peach blossom Q640500
648
+ bedroom Q193837
649
+ guitar Q6607
650
+ learning Q133500
651
+ picture Q96093619
652
+ rodent Q10850
653
+ mistake Q3732574
654
+ advertisement Q39911916
655
+ numb Q19973
656
+ prudence Q1165788
657
+ yarn Q49007
658
+ movement Q929848
659
+ market economy Q179522
660
+ remember Q18153484
661
+ policeman Q384593
662
+ cloud Q8074
663
+ interesting Q6046225
664
+ milligram Q3241121
665
+ damp Q2565924
666
+ spout Q54803638
667
+ middle school Q149566
668
+ microwave oven Q127956
669
+ bull market Q2116228
670
+ bus Q5638
671
+ drawings Q93184
672
+ natural disasters Q99427841
673
+ place Q98929991
674
+ concert Q182832
675
+ dial Q2827871
676
+ bat Q12047696
677
+ congregation Q2135977
678
+ silver Q1090
679
+ throwing Q12898216
680
+ fridge Q37828
681
+ room Q180516
682
+ canine Q19625
683
+ afraid Q4689421
684
+ receiver Q1339255
685
+ towel Q131696
686
+ red sandalwood Q2392887
687
+ flood Q8068
688
+ precipitous Q7239486
689
+ delivery and use Q34704992
690
+ clowder Q16869114
691
+ virus Q808
692
+ hua luogeng Q590111
693
+ reed Q2734060
694
+ hyphenation Q11994045
695
+ mount tai Q216059
696
+ disaster mitigation Q5281334
697
+ ceremony Q2627975
698
+ infestation Q1292038
699
+ razor Q13422881
700
+ spacecraft Q40218
701
+ supermarket Q180846
702
+ sydney Q3130
703
+ investigation Q21004260
704
+ creative Q667982
705
+ womb Q9612
706
+ friction Q82580
707
+ weekday Q41825
708
+ gardener Q758780
709
+ senate Q2570643
710
+ sodium chloride Q2314
711
+ print Q11060274
712
+ extension Q1384981
713
+ outdated Q57495609
714
+ airflow Q4698686
715
+ oil paper Q11554381
716
+ end Q12769393
717
+ rules of conduct Q59002477
718
+ entertainment Q173799
719
+ manager Q2462658
720
+ livestock Q103459
721
+ competition Q476300
722
+ sleep Q35831
723
+ byte Q8799
724
+ confectionery Q18646422
725
+ poyang lake Q207690
726
+ venice Q641
727
+ flowing water Q355304
728
+ breakfast Q80973
729
+ cream Q13228
730
+ submit Q86642263
731
+ cola Q134041
732
+ rose Q102231
733
+ seats Q37287750
734
+ tanned Q3980888
735
+ zebra Q32789
736
+ gem Q8923
737
+ tasty Q1758905
738
+ sea Q165
739
+ window Q35473
740
+ glacier Q35666
741
+ little dog Q48816342
742
+ respond Q604748
743
+ pheasant Q214169
744
+ table lamp Q36641511
745
+ red wine Q1827
746
+ physics Q413
747
+ mathematical models Q12121859
748
+ sketch Q5078274
749
+ ugly Q4080955
750
+ on Q1904
751
+ scarce Q7430330
752
+ river water Q30092769
753
+ drying Q278061
754
+ summer Q1313
755
+ song Q7366
756
+ person Q215627
757
+ pin Q838312
758
+ ahead Q17428119
759
+ field Q188869
760
+ thread Q1391831
761
+ corn Q154558
762
+ church Q16970
763
+ noisy Q11235817
764
+ fe2o3 Q3067815
765
+ alive Q332798
766
+ stagflation Q185245
767
+ grapes Q10978
768
+ enzyme Q8047
769
+ closure Q5135520
770
+ flower Q506
771
+ rice Q5090
772
+ netizen Q167382
773
+ aware Q55691459
774
+ species Q7432
775
+ cation Q326277
776
+ vinyl fiber Q11903389
777
+ fish Q152
778
+ three gorges Q12514
779
+ weapon Q728
780
+ admiration Q2824657
781
+ commodity Q317088
782
+ bridge Q12280
783
+ arbor Q23457088
784
+ steamship Q12859788
785
+ biogas Q171076
786
+ coast Q93352
787
+ reward Q66688988
788
+ invest Q6060613
789
+ nourishing Q50386808
790
+ bucket Q47107
791
+ rural areas Q175185
792
+ tree Q10884
793
+ container Q987767
794
+ lotus Q16528
795
+ ship Q11446
796
+ pie Q13360264
797
+ comparison Q1720648
798
+ carving Q18448934
799
+ convenient Q36247
800
+ rural area Q175185
801
+ furious Q27198272
802
+ repair Q2144962
803
+ sky Q527
804
+ raid Q476807
805
+ lily Q40458125
806
+ antler Q29838581
807
+ struggle Q14536140
808
+ conch Q62792
809
+ trigger Q336751
810
+ carbon dioxide Q1997
811
+ fiction Q8253
812
+ activated carbon Q190878
813
+ cushion Q10971235
814
+ weight Q25288
815
+ minor Q170079
816
+ nutrition Q2138622
817
+ selected poems Q77002510
818
+ pottery Q11642
819
+ water Q283
820
+ music notation Q233861
821
+ temperature Q11466
822
+ season Q24384
823
+ words Q1321370
824
+ lawn Q207766
825
+ rebar Q150425
826
+ radar Q47528
827
+ ditch Q2048319
828
+ piano piece Q1746015
829
+ alphabet Q9779
830
+ pesticides Q131656
831
+ shirt Q76768
832
+ service Q7406919
833
+ couple Q219160
834
+ peel Q171187
835
+ marketplace Q330284
836
+ laugh Q170579
837
+ flat Q7001368
838
+ awful Q3325266
839
+ cotton thread Q64634437
840
+ lion king Q106980799
841
+ post Q49845
842
+ organization Q43229
843
+ honey Q10987
844
+ delicate Q11196749
845
+ purple light Q2118942
846
+ necklace Q189299
847
+ infectious diseases Q788926
848
+ sneakers Q1929383
849
+ book Q571
850
+ musical note Q55400101
851
+ paws Q3050175
852
+ swimming Q31920
853
+ duck Q3736439
854
+ smelting Q2748405
855
+ hungry Q3535686
856
+ yuan dynasty Q7313
857
+ coffee Q8486
858
+ parts Q45085932
859
+ falcon Q202071
860
+ joystick Q178805
861
+ harbour Q283202
862
+ the constitution Q779
863
+ train Q870
864
+ transplanting Q356031
865
+ pearl river delta Q1144286
866
+ cocktail Q134768
867
+ construction Q385378
868
+ saltwater lake Q21955018
869
+ silver carp Q76115
870
+ hardness Q3236003
871
+ downslope Q110628325
872
+ madam Q2011889
873
+ nucleus Q40260
874
+ crime Q83267
875
+ wire Q551997
876
+ in Q668
877
+ landslides Q15751090
878
+ electric lamp Q107197071
879
+ exciting Q5324675
880
+ health care Q31207
881
+ owl Q8021345
882
+ stone Q22731
883
+ social sciences Q34749
884
+ seasoning Q773108
885
+ lubricant Q323840
886
+ acid rain Q40178
887
+ ecstatic Q920312
888
+ turkey Q43
889
+ united states Q30
890
+ deodorant Q309035
891
+ life Q3
892
+ broad beans Q110014844
893
+ feeling Q205555
894
+ participation Q1773152
895
+ rock sugar Q849816
896
+ pollution Q58734
897
+ high speed rail Q211382
898
+ loose Q488934
899
+ metal Q11426
900
+ distance Q126017
901
+ hongze lake Q1154914
902
+ pill Q1097859
903
+ speeding Q17598913
904
+ shell Q318028
905
+ gown Q1036729
906
+ bead Q1053956
907
+ life science Q420
908
+ city Q515
909
+ father Q7565
910
+ milk Q8495
911
+ cheap Q5088855
912
+ baffled Q18250984
913
+ black box Q29256
914
+ film Q11424
915
+ intellectual property Q131257
916
+ hailstorm Q61071643
917
+ taking medicine Q42764222
918
+ oil field Q211748
919
+ rockery Q1141231
920
+ garden Q1107656
921
+ antenna Q204664
922
+ last Q30013662
923
+ treat Q1127306
924
+ bricks Q37537637
925
+ rock Q8063
926
+ cabbage Q14328596
927
+ heat Q44432
928
+ copper Q753
929
+ doll Q168658
930
+ cellulose Q80294
931
+ finalization Q17089828
932
+ sink Q140565
933
+ jiangsu Q16963
934
+ fallacy Q186150
935
+ admonition Q4683829
936
+ check in Q1068755
937
+ malt Q152024
938
+ liberal arts Q13539073
939
+ natural gas Q40858
940
+ cobra Q2303322
941
+ fitness Q331710
942
+ internet addiction Q831735
943
+ loss Q20888800
944
+ makeup Q107196431
945
+ dock Q124282
946
+ publishing Q3972943
947
+ greens Q20134
948
+ shoes Q22676
949
+ vulture Q123120
950
+ beads Q27964852
951
+ team Q327245
952
+ camping Q455550
953
+ solid Q11438
954
+ summary Q776754
955
+ wings Q622988
956
+ eggshell Q2731253
957
+ head Q23640
958
+ polymer Q81163
959
+ cool down Q30612297
960
+ gaseous Q11432
961
+ coke Q192795
962
+ food crop Q31839438
963
+ casualty Q1056901
964
+ court Q41487
965
+ beach Q40080
966
+ crops Q235352
967
+ ore Q102798
968
+ clock Q376
969
+ cup Q81727
970
+ page Q1069725
971
+ stock market Q475000
972
+ viper Q192056
973
+ base station Q1379116
974
+ pollutants Q19829510
975
+ tank Q12876
976
+ laboratory Q483242
977
+ drug Q8386
978
+ electricity generation Q383973
979
+ sickness Q12136
980
+ toothbrush Q134205
981
+ stegosaurus Q14388
982
+ jump Q1151752
983
+ orangutan Q41050
984
+ hangzhou Q4970
985
+ uniform Q7434
986
+ crowd Q13430821
987
+ consolation Q1990010
988
+ ascend Q5926364
989
+ emissions Q10711575
990
+ forest Q4421
991
+ tripod Q683906
992
+ fries Q152088
993
+ bit Q8805
994
+ canada Q16
995
+ air Q7391292
996
+ sincere Q3372957
997
+ hour Q25235
998
+ carrots Q5046723
999
+ income distribution Q3915542
1000
+ object Q488383
1001
+ blood Q7873
1002
+ detoxification Q1090423
1003
+ increment Q54453625
1004
+ inside Q109810863
1005
+ blossoms Q2047589
1006
+ indulge Q28452346
1007
+ scholars Q2248623
1008
+ battery Q240313
1009
+ guest room Q111122056
1010
+ bath Q22889
1011
+ cotton Q11457
1012
+ design Q82604
1013
+ ability Q109581753
1014
+ stroke Q12202
1015
+ harmful Q84426668
1016
+ record Q1241356
1017
+ clever Q362872
1018
+ mobile Q784445
1019
+ silicone Q146439
1020
+ siberian tiger Q69581
1021
+ ethics Q9465
1022
+ chassis Q1068107
1023
+ tuberculosis Q12204
1024
+ box Q188075
1025
+ odd Q215302
1026
+ isosceles triangle Q875937
1027
+ tub Q38942
1028
+ juror Q16102026
1029
+ phytoplankton Q184755
1030
+ call for help Q48631343
1031
+ toy Q11422
1032
+ magnetic field Q11408
1033
+ bookshelf Q107326062
1034
+ thermos Q23664
1035
+ typewriter Q46335
1036
+ vitamins Q34956
1037
+ drink Q40050
1038
+ happiness Q8
1039
+ diseases Q12136
1040
+ scarcity Q815758
1041
+ patient Q181600
1042
+ banyan tree Q465570
1043
+ toronto Q172
1044
+ listener Q7747542
1045
+ great wall Q1117001
1046
+ lotion Q1871151
1047
+ clockwork Q1400734
1048
+ believe Q815612
1049
+ poorly Q6084258
1050
+ anhui Q40956
1051
+ coral skeleton Q81171102
1052
+ university Q3918
1053
+ go home Q5574826
1054
+ warship Q3114762
1055
+ boat Q35872
1056
+ mountain stream Q2570370
1057
+ internet search Q59639813
1058
+ waiting Q16944487
1059
+ hollywood Q34006
1060
+ bee Q7391
1061
+ title page Q1339862
1062
+ send Q118093
1063
+ berlin Q64
1064
+ warriors Q739702
1065
+ burning Q911076
1066
+ astronaut Q11631
1067
+ dog Q144
1068
+ pig Q787
1069
+ injection Q245359
1070
+ sharp Q2542618
1071
+ fighter plane Q127771
1072
+ allosaurus Q14400
1073
+ stock Q1196314
1074
+ music Q638
1075
+ aircraft carrier Q17205
1076
+ animal Q729
1077
+ rice porridge Q35661296
1078
+ water purifier Q43339363
1079
+ cultural relics Q15814324
1080
+ oil Q42962
1081
+ venus Q313
1082
+ stapling Q112805200
1083
+ plastic film mulch Q37739088
1084
+ bathroom Q190771
1085
+ innovating Q107519268
1086
+ social contact Q28755323
1087
+ pack Q1758639
1088
+ portion Q2105114
1089
+ great white shark Q129026
1090
+ watercolor Q50030
1091
+ steel Q11427
1092
+ jury Q837675
1093
+ miss manners Q6303561
1094
+ college Q189004
1095
+ mascara Q324120
1096
+ natural phenomenon Q1322005
1097
+ unhappy Q20706561
1098
+ lock Q105731
1099
+ stadium Q483110
1100
+ chef Q3499072
1101
+ legal punishment Q104778027
1102
+ spouse Q1196129
1103
+ run Q1360926
1104
+ army Q37726
1105
+ illumination Q6000365
1106
+ conclusion Q20665666
1107
+ millilitre Q2332346
1108
+ amusement park Q194195
1109
+ aloe vera Q80079
1110
+ rust Q184197
1111
+ factory Q83405
1112
+ computer desk Q363931
1113
+ exchange Q179076
1114
+ normal Q273176
1115
+ subsequently Q69693864
1116
+ home Q7743
1117
+ turtleneck Q1544262
1118
+ electrical appliance Q2425052
1119
+ component Q1310239
1120
+ palm Q2001588
1121
+ hunan Q45761
1122
+ vase Q191851
1123
+ ring Q46847
1124
+ crust Q4232578
1125
+ barracks Q131263
1126
+ toward Q7829561
1127
+ war Q198
1128
+ horn Q163759
1129
+ space station Q25956
1130
+ drop Q185789
1131
+ shrub Q42295
1132
+ flagship store Q1426795
1133
+ cold Q270952
1134
+ snake Q2102
1135
+ megabit Q3332814
1136
+ shovel Q7220961
1137
+ windows Q1406
1138
+ slaying Q844482
1139
+ guangdong Q15175
1140
+ dyes Q37456277
1141
+ advise Q290716
1142
+ pouch Q949930
1143
+ software development Q638608
1144
+ clothing Q11460
1145
+ phs Q339353
1146
+ tax Q8161
1147
+ flag Q14660
1148
+ development Q7562091
1149
+ fight Q650711
1150
+ purified water Q59349761
1151
+ happy Q8
1152
+ early warning Q27813916
1153
+ clean Q107998880
1154
+ employee Q703534
1155
+ nap Q5242962
1156
+ torrential rain Q2238184
1157
+ riverway Q2155636
1158
+ feline Q64374257
1159
+ cooking Q38695
1160
+ driving Q999646
1161
+ methane Q37129
1162
+ friend Q17297777
1163
+ movie Q11424
1164
+ classical music Q9730
1165
+ fox Q8331
1166
+ india Q668
1167
+ tears Q76469
1168
+ whiskers Q913572
1169
+ jealous Q42859626
1170
+ hive Q104882831
1171
+ ginseng Q20817212
1172
+ social Q345367
1173
+ store Q1800324
1174
+ drenched Q28451497
1175
+ mountain peak Q207326
1176
+ square Q174782
1177
+ monopoly Q43637
1178
+ pincushion Q1083709
1179
+ spare tire Q2074836
1180
+ pride Q3071551
1181
+ badminton Q7291
1182
+ paper Q11472
1183
+ well Q43483
1184
+ infrastructure Q121359
1185
+ cd Q974
1186
+ grapefruit Q21552830
1187
+ construction industry Q13405640
1188
+ lushan Q622616
1189
+ phone Q202064
1190
+ wardrobe Q106106
1191
+ non-ferrous metals Q96326026
1192
+ sign Q3695082
1193
+ decoration Q11796413
1194
+ paperback Q193934
1195
+ rabbit Q9394
1196
+ character Q3241972
1197
+ pamper Q492768
1198
+ black Q23445
1199
+ boots Q190868
1200
+ tv series Q5398426
1201
+ static Q1940914
1202
+ pistol Q1907525
1203
+ snow Q7561
1204
+ fan Q193432
1205
+ traditional chinese medicine Q200253
1206
+ airport Q1248784
1207
+ tidy Q27898977
1208
+ idea Q131841
1209
+ desk Q1064858
1210
+ hydrogen chloride Q211086
1211
+ the universe Q1
1212
+ melting Q106080
1213
+ tropical Q20755687
1214
+ tight Q1937716
1215
+ off Q21818619
1216
+ temperament Q80157
1217
+ litre Q11582
1218
+ fruit Q3314483
1219
+ weight loss Q718113
1220
+ crossing the line Q3489000
1221
+ harvesting Q213753
1222
+ smuggling Q184840
1223
+ pub Q212198
1224
+ choir Q131186
1225
+ kid Q7569
1226
+ constellation Q8928
1227
+ regulatory Q70330436
1228
+ rear-end collision Q375102
1229
+ exhale Q2215554
1230
+ exterior Q1385033
1231
+ recruit Q1141116
1232
+ grass Q643352
1233
+ trademark Q167270
1234
+ exhausted Q15729017
1235
+ diamond drill Q100293148
1236
+ textbook Q83790
1237
+ milk powder Q496696
1238
+ sad Q281928
1239
+ battlefield Q4895508
1240
+ lights Q9128
1241
+ glasses Q27334842
1242
+ ink Q127418
1243
+ outsiders Q1055332
1244
+ dislike Q111653591
1245
+ return Q65088609
1246
+ recovery Q38127868
1247
+ mine disaster Q1550225
1248
+ climb Q1499786
1249
+ lady Q467
1250
+ cooked Q104439289
1251
+ rare metals Q15760439
1252
+ previously Q79030196
1253
+ safe Q471898
1254
+ housekeeping Q708514
1255
+ books Q571
1256
+ storage Q9158768
1257
+ same Q27135598
1258
+ brewing Q869095
1259
+ identical Q842346
1260
+ infection Q166231
1261
+ full moon Q104641
1262
+ hinge Q244330
1263
+ submerge Q29558624
1264
+ cat Q4167836
1265
+ flourishing Q5462027
1266
+ true Q16751793
1267
+ goals Q4503831
1268
+ elk Q61480
1269
+ concrete Q22657
1270
+ tiger Q19939
1271
+ private enterprise Q110583109
1272
+ cent Q58093
1273
+ vixen Q1865430
1274
+ soon Q3543591
1275
+ the great wall Q12501
1276
+ monorail Q187934
1277
+ cultivation Q488798
1278
+ sandwich Q28803
1279
+ modern art Q38166
1280
+ iris Q178748
1281
+ the uk Q145
1282
+ conception Q37754875
1283
+ delisting Q73545549
1284
+ oxygen atoms Q38674435
1285
+ speakers Q79935565
1286
+ bracelet Q201664
1287
+ shanghai Q8686
1288
+ carry Q432449
1289
+ truth Q7949
1290
+ troupe Q37555509
1291
+ weaving Q192296
1292
+ selfie Q12068677
1293
+ congestion Q102187260
1294
+ cardboard Q389782
1295
+ literary works Q16321420
1296
+ article Q191067
1297
+ white people Q235155
1298
+ games Q11410
1299
+ machine Q11019
1300
+ score Q522344
1301
+ dollar Q14083
1302
+ consulting room Q73371931
1303
+ warrior Q1250916
1304
+ gymnosperms Q59618763
1305
+ weibo Q9387
1306
+ philosophy department Q109246805
1307
+ jail Q40357
1308
+ pneumonia Q12192
1309
+ go Q11413
1310
+ murder Q132821
1311
+ hamburger Q51974
1312
+ holster Q1475429
1313
+ dinner Q657221
1314
+ grade Q18185
1315
+ confucius Q4604
1316
+ medical equipment Q6657015
1317
+ new Q33979
1318
+ sweet and sour Q104383007
1319
+ fall Q11620540
1320
+ r&d Q276099
1321
+ biscuits Q19801756
1322
+ wooden table Q64572893
1323
+ full Q178026
1324
+ slippery road Q2718084
1325
+ pointer Q118155
1326
+ lenses Q60415975
1327
+ mountains Q8502
1328
+ walnut Q208021
1329
+ calculate Q622821
1330
+ cattle and sheep Q23929134
1331
+ currency Q8142
1332
+ researcher Q1650915
1333
+ gothic architecture Q176483
1334
+ journal Q49850
1335
+ incompetent Q62562253
1336
+ organism Q7239
1337
+ player Q4197743
1338
+ product Q2424752
1339
+ ant Q781
1340
+ body shape Q25212061
1341
+ tedious Q22471755
1342
+ hefei Q185684
1343
+ treatment Q179661
1344
+ sowing Q777671
1345
+ wrong Q29485
1346
+ soap Q34396
1347
+ core Q23595
1348
+ cattle Q830
1349
+ mrs. Q313549
1350
+ attractions Q18237485
1351
+ dishonesty Q5282225
1352
+ monitor Q137823
1353
+ show Q15116915
1354
+ division Q169534
1355
+ lamb Q4575936
1356
+ land Q11081619
1357
+ vote Q42904171
1358
+ leopard Q34706
1359
+ arrest Q1403016
1360
+ tile Q468402
1361
+ compressor Q178898
1362
+ autumn harvest Q41775274
1363
+ bad Q44142
1364
+ piano Q5994
1365
+ cockroach Q18123008
1366
+ bamboo Q27891820
1367
+ carbon Q623
1368
+ want Q26256512
1369
+ grain Q2995529
1370
+ speed bumps Q112881805
1371
+ salad Q9266
1372
+ droplets Q97502608
1373
+ huangshan Q180470
1374
+ compiling Q12769326
1375
+ utensil Q1357761
1376
+ security guard Q856887
1377
+ cleaning Q17200001
1378
+ firewood Q35808
1379
+ satellite Q1297322
1380
+ stitch Q602854
1381
+ fishery Q180538
1382
+ engine Q44167
1383
+ negligence Q160070
1384
+ pony Q188828
1385
+ flight Q206021
1386
+ hyperbole Q181036
1387
+ poem Q5185279
1388
+ font Q4868296
1389
+ commercial vehicle Q580939
1390
+ nail clippers Q270380
1391
+ line Q37105
1392
+ output Q1150771
1393
+ tambourine Q193666
1394
+ auctioneer Q2743689
1395
+ guangming daily Q3102631
1396
+ haze Q643546
1397
+ spice Q42527
1398
+ root Q111029
1399
+ mass Q11423
1400
+ agriculture Q11451
1401
+ canvas Q4259259
1402
+ sonata Q131269
1403
+ running Q105674
1404
+ mosquito Q7367
1405
+ large Q10989264
1406
+ prime minister Q14212
1407
+ able Q16157710
1408
+ minerals Q7946
1409
+ scales Q134566
1410
+ expensive Q46944820
1411
+ bloom Q2987425
1412
+ dad Q7565
1413
+ quail Q6072584
1414
+ fuel Q42501
1415
+ athletics Q542
1416
+ musician Q639669
1417
+ abnormal Q4668171
1418
+ commemoration day Q21190816
1419
+ strings Q709099
1420
+ behind Q16938807
1421
+ pane Q2416562
1422
+ police Q35535
1423
+ scoundrel Q1305415
1424
+ limousine Q188475
1425
+ mediterranean Q4918
1426
+ transparent Q487623
1427
+ sorghum Q105549747
1428
+ parishioner Q55876931
1429
+ load Q913999
1430
+ lazy Q573952
1431
+ snack Q749316
1432
+ brick Q40089
1433
+ cookware Q1521410
1434
+ disney Q7414
1435
+ navigator Q254651
1436
+ hate Q160232
1437
+ beaver Q47542
1438
+ mend Q18035603
1439
+ gourd Q7370671
1440
+ dive Q3389076
1441
+ relief Q245117
1442
+ program Q170584
1443
+ welding Q131172
1444
+ pear blossom Q106767198
1445
+ dameisha Q25101745
1446
+ bacon Q11106
1447
+ lion Q140
1448
+ metabolism Q1057
1449
+ leaf Q33971
1450
+ nutrient Q181394
1451
+ birds Q5113
1452
+ indolent Q18573407
1453
+ collar Q497903
1454
+ tides Q1779406
1455
+ away Q55604566
1456
+ reasonable Q845329
1457
+ apple Q89
1458
+ strength Q605035
1459
+ places of interest Q3469818
1460
+ downhill Q319899
1461
+ star Q523
1462
+ paper cut Q1630633
1463
+ distant Q126017
1464
+ curtains Q899625
1465
+ application Q166142
1466
+ hand Q33767
1467
+ silicon Q670
1468
+ bamboo slips Q107489970
1469
+ bangalore Q1355
1470
+ promotions Q24024808
1471
+ tragic Q39073224
1472
+ introvert Q106765930
1473
+ hearing Q160289
1474
+ waning moon Q34604447
1475
+ yogurt Q13317
1476
+ effort Q170584
1477
+ environment Q2249676
1478
+ steamer Q178193
1479
+ granulated sugar Q4409456
1480
+ plate Q57216
1481
+ anterior Q1867507
1482
+ wheat Q15645384
1483
+ automobile Q1420
1484
+ greenhouse Q165044
1485
+ lodge Q108840195
1486
+ air conditioner Q1265533
1487
+ student Q48282
1488
+ eas Q1318054
1489
+ tree roots Q4233325
1490
+ wok Q208364
1491
+ discrimination Q169207
1492
+ religion Q9174
1493
+ water heater Q16630809
1494
+ rich Q106804044
1495
+ text message Q13159882
1496
+ donjon Q91165
1497
+ daisy Q26158
1498
+ nuclear test Q210112
1499
+ drug driving Q104763414
1500
+ album Q482994
1501
+ organized Q110916833
1502
+ fungus Q764
1503
+ calf Q2935
1504
+ raw Q18754
1505
+ back Q133279
1506
+ consulting Q63769412
1507
+ delicious Q329192
1508
+ administration Q5283295
1509
+ watch Q178794
1510
+ spontaneous combustion Q369012
1511
+ los angeles Q65
1512
+ deforestation Q169940
1513
+ color Q1075
1514
+ martial arts Q11417
1515
+ tyrannosaurus Q14332
1516
+ spinach Q36814998
1517
+ intranet Q483426
1518
+ january Q108
1519
+ mountain Q8502
1520
+ extravert Q9257166
1521
+ nuclear energy Q12739
1522
+ spray Q1424833
1523
+ boarding Q887540
1524
+ teaching Q352842
1525
+ humanities Q80083
1526
+ golden monkey Q775343
1527
+ radiation Q18335
1528
+ paddy Q15838080
1529
+ tea leaf Q484083
1530
+ bow Q46311
1531
+ starving Q27499098
1532
+ knowledge Q9081
1533
+ usa Q30
1534
+ compute Q622821
1535
+ chemistry Q2329
1536
+ frightened Q79322171
1537
+ water pollution Q183129
1538
+ stove Q203789
1539
+ protestantism Q23540
1540
+ teacher Q37226
1541
+ bench scale Q26377667
1542
+ eagle Q2092297
1543
+ sales management Q5657855
1544
+ e-commerce Q484847
1545
+ humans Q5
1546
+ landscape Q107425
1547
+ habit Q1299714
1548
+ doctor Q96633923
1549
+ tall Q1770
1550
+ before Q79030196
1551
+ nuclear submarine Q757554
1552
+ wine Q282
1553
+ female doctor Q110228881
1554
+ implement Q39546
1555
+ gini coefficient Q162455
1556
+ chicken Q780
1557
+ iron ore Q191552
1558
+ gunpowder Q12861
1559
+ historical fiction Q1196408
1560
+ rebuttal Q1410600
1561
+ monday Q105
1562
+ subsidies Q17054224
1563
+ doze Q37019865
1564
+ protein Q8054
1565
+ pipa Q6685124
1566
+ furniture Q14745
1567
+ pence Q234129
1568
+ kilogram Q11570
1569
+ vapor Q255722
1570
+ irritate Q6073879
1571
+ chess Q718
1572
+ reading Q199657
1573
+ bovid Q25497
1574
+ artwork Q838948
1575
+ leather bag Q1107811
1576
+ emphasis Q920915
1577
+ london Q84
1578
+ shilling Q213142
1579
+ homogeneous Q110081157
1580
+ merchants Q6818448
1581
+ holidays Q107383247
1582
+ watermelon Q38645
1583
+ insurance Q43183
1584
+ physicist Q169470
1585
+ lime water Q78948116
1586
+ plant Q756
1587
+ colorless Q1396399
1588
+ bike Q11442
1589
+ nanjing Q16666
1590
+ bleach Q11587
1591
+ loyalty Q1132131
1592
+ ankle Q168002
1593
+ immortal Q208417
1594
+ aim Q4503831
1595
+ benefit Q101097118
1596
+ seagull Q2699803
1597
+ flask Q95685937
1598
+ branch Q2923673
1599
+ preservatives Q56429795
1600
+ jam Q1269
1601
+ shoot Q220869
1602
+ soy sauce Q229385
1603
+ skipping rope Q244158
1604
+ alcohol Q47146337
1605
+ choker Q1424519
1606
+ camera Q15328
1607
+ man Q8441
1608
+ physical changes Q112902505
1609
+ sheet music Q187947
1610
+ vocabulary Q6499736
1611
+ sequoia Q1975652
1612
+ yellow Q943
1613
+ sparrow Q28922
1614
+ visible light Q76299
1615
+ carp Q2751223
1616
+ outward Q62128996
1617
+ powerful Q20735603
1618
+ civil law Q222249
1619
+ water plants Q103312200
1620
+ oxides Q50690
1621
+ thesis Q1266946
1622
+ silent Q103827699
1623
+ notes Q89200784
1624
+ western europe Q27496
1625
+ switch Q5320
1626
+ trousers Q39908
1627
+ shingles Q182155
1628
+ receive Q76664785
1629
+ fluorescent lamp Q182925
1630
+ opposed Q1498298
1631
+ inertia Q122508
1632
+ upslope Q17107707
1633
+ vinegar Q41354
1634
+ galaxy Q318
1635
+ jar Q1207302
1636
+ exhibition Q464980
1637
+ stingy Q7617440
1638
+ out Q1153773
1639
+ submarine Q2811
1640
+ worker Q327055
1641
+ ideation Q17039022
1642
+ dry Q1470363
1643
+ network card Q165233
1644
+ insincere Q112112091
1645
+ innovation Q174165
1646
+ produce Q1913301
1647
+ editing Q194105
1648
+ muffler Q1165721
1649
+ sane Q7157308
1650
+ hospital Q16917
1651
+ renewable energy Q12705
1652
+ anti-scratch Q16829014
1653
+ fluidity Q3074482
1654
+ point Q44946
1655
+ land sea Q64141914
1656
+ processor Q1466064
1657
+ selling Q3380760
1658
+ flock Q120997
1659
+ shanxi Q46913
1660
+ magnolia Q157017
1661
+ chemist Q593644
1662
+ platypus Q15343
1663
+ litter Q476850
1664
+ cash Q693464
1665
+ southeast Q6452640
1666
+ device Q3966
1667
+ formulation Q3077570
1668
+ pot Q2845
1669
+ breakthrough Q2920535
1670
+ exit Q854429
1671
+ down Q15332388
1672
+ sun Q132
1673
+ gourmet Q22261015
1674
+ soil Q36133
1675
+ cell phone Q17517
1676
+ gas Q11432
1677
+ steamed bread Q7605452
1678
+ sales Q194189
1679
+ microcomputer Q32738
1680
+ unorganized Q2141879
1681
+ beverage Q40050
1682
+ fish head Q5454629
1683
+ buttons Q1573122
1684
+ woody plants Q111694088
1685
+ gear Q143828
1686
+ button Q160464
1687
+ vertebrate Q110551902
1688
+ external Q66571843
1689
+ net Q986291
1690
+ cry Q152247
1691
+ housing Q1247867
1692
+ front Q899192
1693
+ flowers Q506
1694
+ mirror Q146701
1695
+ blossom Q2047589
1696
+ immediacy Q2811064
1697
+ erhu Q726306
1698
+ wristband Q1549506
1699
+ finance Q43015
1700
+ fans Q193432
1701
+ diary Q185598
1702
+ casting the net Q60346683
1703
+ seafood Q192935
1704
+ crazy Q505619
1705
+ mobile phone Q17517
1706
+ rake Q200822
1707
+ calcium deficiency Q44705078
1708
+ chinchilla Q1073656
1709
+ favor Q1263003
1710
+ curing Q1503716
1711
+ weighing Q12779002
1712
+ exam Q107383235
1713
+ reduced production Q37211094
1714
+ fingers Q2364697
1715
+ classroom Q621114
1716
+ compass Q103896
1717
+ bomber Q170877
1718
+ property Q937228
1719
+ brooch Q499916
1720
+ necessary Q2301186
1721
+ sugar Q11002
1722
+ pavilion Q276173
1723
+ failure Q1121708
1724
+ election Q40231
1725
+ unable Q21655367
1726
+ ultrasound Q162564
1727
+ valley Q39816
1728
+ computer Q68
1729
+ cow Q11748378
1730
+ skateboard Q15783
1731
+ lost Q23567
1732
+ ice Q23392
1733
+ rubber Q18113858
1734
+ symbols Q80071
1735
+ keyboard Q1921606
1736
+ vibration Q3695508
1737
+ clay Q42302
1738
+ lad Q3010
1739
+ review Q265158
1740
+ academia Q1211427
1741
+ silver dollar Q1186710
1742
+ decrement Q47496130
1743
+ inbound Q74424273
1744
+ difficult Q66741662
1745
+ superstition Q133182
1746
+ einstein Q937
1747
+ high speed railway Q858485
1748
+ kite Q42861
1749
+ catalyst Q12385831
1750
+ above Q15332375
1751
+ buffalo Q40435
1752
+ bulb Q188748
1753
+ sweater Q232191
1754
+ iron Q677
1755
+ waterway Q1267889
1756
+ investment Q4290
1757
+ simple Q508291
1758
+ generous Q3100542
1759
+ blood vessel Q988343
1760
+ radish Q33669098
1761
+ descend Q15805316
1762
+ paper-cut Q1630633
1763
+ wool Q42329
1764
+ action Q4026292
1765
+ commander Q6620231
1766
+ unwilling Q20737431
1767
+ leg Q133105
1768
+ manner Q367293
1769
+ porcelain Q130693
1770
+ gloves Q169031
1771
+ after Q79030284
1772
+ goose Q16529344
1773
+ library Q7075
1774
+ gold Q897
1775
+ intelligent Q2615500
1776
+ corruption Q366
1777
+ jewellery Q161439
1778
+ jacket Q849964
1779
+ festival Q132241
1780
+ chinese characters Q8201
1781
+ private hospital Q4284971
1782
+ phenomenon Q483247
1783
+ altitude Q190200
1784
+ orthography Q43091
1785
+ condiment Q2596997
1786
+ black tea Q203415
1787
+ tail Q60960
1788
+ cheongsam Q836934
1789
+ bouquet Q1187930
1790
+ tired Q15729017
1791
+ jackal Q125525
1792
+ mammals Q7377
1793
+ butterfly Q11946202
1794
+ iodine Q1103
1795
+ bread Q7802
1796
+ drilling Q890886
1797
+ seaport Q15310171
1798
+ osteoporosis Q165328
1799
+ fishing boat Q106619054
1800
+ frankness Q19357492
1801
+ beetle Q22671
1802
+ toaster Q14890
1803
+ civil servant Q212238
1804
+ taste buds Q862867
1805
+ dress Q200539
1806
+ receipt Q330190
1807
+ text Q234460
1808
+ theft Q2727213
1809
+ balance Q1365641
1810
+ epidemic Q44512
1811
+ bidding Q2142250
1812
+ cars Q182153
1813
+ wing chun Q217127
1814
+ instrument Q109564569
1815
+ sadness Q169251
1816
+ childhood Q276258
1817
+ analog Q50824047
1818
+ radio Q872
1819
+ battle Q178561
1820
+ miscalculation Q21096985
1821
+ wheel Q446
1822
+ calligraphy Q12681
1823
+ county Q28575
1824
+ heroin Q60168
1825
+ orchard Q236371
1826
+ white fungus Q7994710
1827
+ mother Q7560
1828
+ wig Q105507
1829
+ steel industry Q3406654
1830
+ fragility Q2645227
1831
+ paragraph Q194431
1832
+ tv Q672
1833
+ sidewalk Q177749
1834
+ college student Q315247
1835
+ silica Q15724995
1836
+ plane Q17285
1837
+ scissors Q40847
1838
+ month Q5151
1839
+ seedling Q1385709
1840
+ narrow Q18162636
1841
+ tv station Q1616075
1842
+ tai chi Q167987
1843
+ bottle Q80228
1844
+ audience Q211198
1845
+ colouring Q2022532
1846
+ letter Q133492
1847
+ ingenious Q16254265
1848
+ acrobat Q11957145
1849
+ chen jingrun Q715284
1850
+ beehive Q165107
1851
+ education Q8434
1852
+ combustion Q133235
1853
+ woodcarving Q337907
1854
+ configuration Q1866689
1855
+ secretary Q80687
1856
+ hourglass Q179904
1857
+ day Q573
1858
+ citizen Q1020994
1859
+ technology Q11016
1860
+ storytelling Q989963
1861
+ drunk driving Q250062
1862
+ victory Q50000
1863
+ pine tree Q59668787
1864
+ lens Q768575
1865
+ jinwen Q45530615
1866
+ extinction Q123509
1867
+ forget Q1377840
1868
+ entrepreneur Q131524
1869
+ aircraft Q11436
1870
+ bucks Q23229
1871
+ beijing Q956
1872
+ trees Q10884
1873
+ herd Q209542
1874
+ broker Q160117
1875
+ night sky Q1153471
1876
+ milk tea Q1884224
1877
+ gap Q16887036
1878
+ item Q11723795
1879
+ order Q36602
1880
+ flowers and trees Q1458430
1881
+ dollars Q4917
1882
+ industrial property Q2750057
1883
+ sniffles Q7547763
1884
+ serpent Q742168
1885
+ liver Q9368
1886
+ social science Q34749
1887
+ disinfect Q19758137
1888
+ shenzhen Q15174
1889
+ lead Q708
1890
+ gigabit Q3105497
1891
+ ox Q473194
1892
+ cowpen Q5179793
1893
+ fabric Q457931
1894
+ drought Q43059
1895
+ lemon Q1093742
1896
+ power Q25107
1897
+ baby Q998
1898
+ activity Q1914636
1899
+ decisive Q18245643
1900
+ lampshade Q947686
1901
+ horde Q915366
1902
+ commercial Q2986261
1903
+ candy Q185583
1904
+ confiscation Q275038
1905
+ string Q184754
1906
+ read Q16881915
1907
+ card Q42965339
1908
+ pool Q11020
1909
+ ornamental trees Q51377271
1910
+ operating system Q9135
1911
+ judge Q16533
1912
+ wet Q843589
1913
+ elevator Q132911
1914
+ class Q37517
1915
+ volcanic eruption Q7692360
1916
+ prison Q40357
1917
+ energy Q11379
1918
+ auto Q1420
1919
+ umbrella Q41607
1920
+ kitchen Q43164
1921
+ flesh Q13119823
1922
+ uphill Q111084907
1923
+ fujian Q41705
1924
+ science fiction Q24925
1925
+ exercise Q29051769
1926
+ fowl Q853058
1927
+ integer Q12503
1928
+ geometry Q8087
1929
+ oil lamp Q821952
1930
+ optical fiber Q162
1931
+ broiler Q326301
1932
+ sweet peach Q20011064
1933
+ atom Q9121
1934
+ emerge Q1050405
1935
+ photo Q125191
1936
+ proceed Q107724490
1937
+ afflicted Q16960397
1938
+ giggle Q170579
1939
+ capsules Q112597033
1940
+ comma Q161736
1941
+ starch Q41534
1942
+ drawing on Q20490867
1943
+ tape measure Q214649
1944
+ door Q36794
1945
+ velvet antler Q4344358
1946
+ poplar Q30456678
1947
+ input Q1125955
1948
+ loom Q173056
1949
+ farmer Q131512
1950
+ drinks Q40050
1951
+ foraging Q2916569
1952
+ reporter Q42909
1953
+ wipers Q1889932
1954
+ language Q315
1955
+ works Q24897655
1956
+ lace Q231250
1957
+ host Q221673
1958
+ road Q34442
1959
+ control Q29017603
1960
+ shrubbery Q7504315
1961
+ hunger Q165947
1962
+ personality Q641118
1963
+ quyi Q7273050
1964
+ stage Q194428
1965
+ reference Q121769
1966
+ russia Q159
1967
+ bookmarks Q112578360
1968
+ fever Q38933
1969
+ arranged Q4795846
1970
+ space Q380933
1971
+ empty Q67932942
1972
+ adjust Q317158
1973
+ script Q33260112
1974
+ forward Q280658
1975
+ circuit Q718570
1976
+ anger Q79871
1977
+ xiaogang village Q97027313
1978
+ practice Q334600
1979
+ tasteless Q30100868
1980
+ aluminum Q663
1981
+ confused Q557945
1982
+ risk Q104493
1983
+ taciturn Q305418
1984
+ thinker Q24885626
1985
+ woman Q467
1986
+ hybrid cat Q111733338
1987
+ science Q336
1988
+ red Q3142
1989
+ food Q2095
1990
+ vague Q37241976
1991
+ catch Q2941721
1992
+ plastic Q11474
1993
+ plum Q12372598
1994
+ huawei Q160120
1995
+ market behavior Q51036317
1996
+ securities Q1416279
1997
+ sour Q1928899
1998
+ sleeve Q256458
1999
+ office Q182060
2000
+ drummer Q386854
2001
+ fatty meat Q107262066
2002
+ tabletop Q48885552
2003
+ sociology Q21201
2004
+ roll film Q1599148
2005
+ peanut Q37383
2006
+ traffic police Q16874635
2007
+ lumpy Q9005258
2008
+ warm spring Q7969566
2009
+ wonderland Q3402263
2010
+ teacup Q81707
2011
+ port Q44782
2012
+ idioms Q34770
2013
+ detective comics Q1799866
2014
+ pursue Q11639276
2015
+ shopping mall Q31374404
2016
+ yangtze river Q5413
2017
+ hairpin Q1566584
2018
+ argumentation Q15766977
2019
+ combat Q650711
2020
+ stability Q2325497
2021
+ warming Q4311765
2022
+ act Q421744
2023
+ roses Q11822
2024
+ letters Q1277575
2025
+ strong Q991202
2026
+ climate change Q125928
2027
+ timpani Q189737
2028
+ illness Q814207
2029
+ sleeves Q79478214
2030
+ discovery Q12772819
2031
+ energetic Q3545708
2032
+ anchor Q168432
2033
+ officer Q61022630
2034
+ noise Q179448
2035
+ mount huang Q106865609
2036
+ questionnaire Q747810
2037
+ excited Q5419647
2038
+ college students Q23915873
2039
+ flowing Q355304
2040
+ apply Q4781618
2041
+ angry Q37097368
2042
+ official Q599151
2043
+ trace elements Q11781028
2044
+ brightness Q221656
2045
+ icicle Q496380
2046
+ human body Q23852
2047
+ bronze Q34095
2048
+ lotus pond Q698996
2049
+ hieroglyphic Q193762
2050
+ mining Q44497
2051
+ screen Q79137673
2052
+ hydrogen Q556
2053
+ spoon Q81895
2054
+ art dealer Q173950
2055
+ training Q918385
2056
+ illegal sales Q50622697
2057
+ close Q12731
2058
+ computer hardware Q3966
2059
+ sports Q349
2060
+ incorrect Q29485
2061
+ loyal Q1569314
2062
+ launch Q659600
2063
+ park Q22698
2064
+ caries Q133772
2065
+ medications Q3304003
2066
+ taizhou Q57931
2067
+ croissant Q207832
2068
+ gravity Q11412
2069
+ illuminated Q740910
2070
+ desk lamp Q3216816
2071
+ brain Q1073
2072
+ railroad Q22667
2073
+ earth Q2
2074
+ tire Q169545
2075
+ wood Q287
2076
+ decrease Q5249659
2077
+ mountain range Q46831
2078
+ legal act Q1864008
2079
+ chocolate Q195
2080
+ poverty alleviation Q1824165
2081
+ bud Q189838
2082
+ reaction Q343546
2083
+ ukulele Q61285
2084
+ leaves Q1807128
2085
+ pen Q165447
2086
+ dismiss Q17052147
2087
+ pigment Q161179
2088
+ heavy industry Q622662
2089
+ lamp Q1138737
2090
+ island Q23442
2091
+ horse Q726
2092
+ tea Q6097
2093
+ mil Q184194
2094
+ list Q12139612
2095
+ enrage Q18031357
2096
+ calcium supplementation Q65386997
2097
+ harbor Q283202
2098
+ dawn Q11326182
2099
+ seat Q2207370
2100
+ long Q10786776
2101
+ chronic diseases Q15816392
2102
+ antipathy Q581459
2103
+ cao cao Q204077
2104
+ electron Q2225
2105
+ parcel Q13107365
2106
+ uk Q145
2107
+ villa Q3950
2108
+ misjudgment Q47817360
2109
+ torment Q2290980
2110
+ divan Q2354542
2111
+ bottom Q11812678
2112
+ pestis Q60300035
2113
+ planning Q309100
2114
+ industry Q8148
2115
+ warm Q13099607
2116
+ mahogany Q958012
2117
+ forage Q13377214
2118
+ tofu Q177378
2119
+ protestant Q110714168
2120
+ watering Q11453
2121
+ supervising Q28007056
2122
+ travel Q61509
2123
+ puppy Q39266
2124
+ sunscreen Q827658
2125
+ new moon Q108566
2126
+ printing Q11034
2127
+ love Q316
2128
+ country Q6256
2129
+ infant Q998
2130
+ internet Q75
2131
+ germany Q183
2132
+ dolphin Q7369
2133
+ colorful Q1368665
2134
+ sweet Q2003356
2135
+ monkey Q1367
2136
+ online games Q60315954
2137
+ clone Q3308178
2138
+ convenience Q5166129
2139
+ driving safely Q72128824
2140
+ hearing aids Q323808
2141
+ outbound Q7111875
2142
+ company Q783794
2143
+ beginning Q529711
2144
+ digestive system Q9649
2145
+ guangzhou Q16572
2146
+ policy Q1156854
2147
+ faith Q5410500
2148
+ help Q1643184
2149
+ pastry Q477248
2150
+ baiyangdian Q2665615
2151
+ interview Q178651
2152
+ emotion Q9415
2153
+ crayons Q107383376
2154
+ delivery Q2334804
2155
+ honeysuckle Q53865327
2156
+ appliance Q1183543
2157
+ syllogism Q107342
2158
+ liquid Q11435
2159
+ mood Q331769
2160
+ flash Q221836
2161
+ disagreement Q104813442
2162
+ trunk Q193472
2163
+ birch Q865564
2164
+ squirrel Q9482
2165
+ dig Q285726
2166
+ south Q667
2167
+ lake Q23397
2168
+ girl Q3031
2169
+ paris Q90
2170
+ cut Q1308978
2171
+ aluminium Q663
2172
+ hieroglyph Q193762
2173
+ steelyard Q1650330
2174
+ cloth Q5849500
2175
+ fatigue Q9690
2176
+ haiku Q37707
2177
+ legs Q133105
2178
+ kids Q646426
2179
+ staircase Q12511
2180
+ up Q1498
2181
+ rainbow Q1052
2182
+ accountability Q2798912
2183
+ printer Q6500733
2184
+ windbreaks Q51379512
2185
+ cover Q331481
2186
+ refrigeration Q747713
2187
+ fat Q127980
2188
+ office hall Q14713005
2189
+ strike Q49776
2190
+ obsolete Q107356532
2191
+ equipment Q10273457
2192
+ flame Q235544
2193
+ crutches Q95444384
2194
+ soft Q18037771
2195
+ learning to drive Q18151617
2196
+ documents Q9344
2197
+ judicial Q105985
2198
+ glaciers Q53865454
2199
+ ion Q36496
2200
+ willing Q37262525
2201
+ carbonated beverage Q13417200
2202
+ dangerous Q44104
2203
+ backward Q16938807
2204
+ thrush Q1249178
2205
+ power generation Q26376690
2206
+ anti-impact Q100159782
2207
+ part Q15989253
2208
+ inland lake Q31805992
2209
+ unfamiliar Q7884320
2210
+ animals Q729
2211
+ spring ploughing Q18890897
2212
+ crow Q43365
2213
+ giant panda Q33602
2214
+ broadcast Q109507868
2215
+ campaign Q18812548
2216
+ wheat kernel Q46988452
2217
+ sacrifice Q179723
2218
+ pet rabbit Q149017
2219
+ vacant Q56056305
2220
+ common Q1305037
2221
+ unaware Q56736680
2222
+ railway Q22667
2223
+ dry powder Q1334805
2224
+ cicada Q1947892
2225
+ cells Q7868
2226
+ listing Q798505
2227
+ penny Q234129
2228
+ unfortunate Q20077244
2229
+ socrates Q913
2230
+ crate Q605384
2231
+ big eyes Q10858674
2232
+ bloated Q29710539
2233
+ jujube Q3093648
2234
+ disease Q12136
2235
+ criminal police Q6081679
2236
+ stamen Q103129
2237
+ chloroplast Q47263
2238
+ red fuji Q3565037
2239
+ queen bee Q816345
2240
+ liquor Q56139
2241
+ auction Q177923
2242
+ clan Q211503
2243
+ bed Q42177
2244
+ notepad Q43013
2245
+ wide Q2125243
2246
+ skills Q10670181
2247
+ video game Q7889
2248
+ ships Q11446
2249
+ white Q23444
2250
+ concerned Q1324697
2251
+ drinking Q876776
2252
+ skin Q1074
2253
+ tent Q170544
2254
+ poor Q5729485
2255
+ salt Q12370
2256
+ brush Q614467
2257
+ bezoar Q851415
2258
+ employ Q1056396
2259
+ innovation development Q98089548
2260
+ knee pad Q681515
2261
+ passenger car Q753779
2262
+ parish Q102496
2263
+ solar system Q544
2264
+ history Q309
2265
+ mushrooms Q83093
2266
+ sand Q34679
2267
+ health Q12147
2268
+ analysis Q217602
2269
+ stamps Q79701
2270
+ margins Q1145724
2271
+ green Q3133
2272
+ preservation Q830393
2273
+ profit Q2112073
2274
+ leather Q286
2275
+ italy Q38
2276
+ old Q822282
2277
+ universe Q1
2278
+ tea set Q3408351
2279
+ panax notoginseng Q15551438
2280
+ good people Q46999639
2281
+ needle Q1192354
2282
+ fragrant Q5477756
2283
+ bristle Q894231
2284
+ inverse Q7017933
2285
+ anthracite Q182458
2286
+ troops Q10841757
2287
+ conductivity Q4593291
2288
+ national day Q57598
2289
+ survey method Q814232
2290
+ swimsuit Q212989
2291
+ oxygen Q629
2292
+ government Q7188
2293
+ bunk Q107196737
2294
+ recruitment Q899277
2295
+ mineral Q7946
2296
+ lunar eclipse Q44235
2297
+ security Q2526135
2298
+ rampant Q1469340
2299
+ instruments Q50817452
2300
+ german Q188
2301
+ starring Q37156731
2302
+ hunt Q36963
2303
+ bad news Q1460233
2304
+ abstract Q333291
2305
+ sensible Q3478630
2306
+ beg Q127955
2307
+ honest Q5893251
2308
+ freeze Q1105534
2309
+ electric light Q1326621
2310
+ wheat ear Q3966720
2311
+ keys Q21491451
2312
+ rhododendron Q189393
2313
+ agitated Q27067578
2314
+ band leader Q1198887
2315
+ shelf Q2637814
2316
+ tea house Q1072166
2317
+ telescope Q4213
2318
+ scheme Q1155772
2319
+ seizure Q6279182
2320
+ vegetables Q11004
2321
+ ultraviolet ray Q11391
2322
+ impatience Q16523690
2323
+ melody Q170412
2324
+ adhesive Q131790
2325
+ sphygmomanometer Q503968
2326
+ death Q4
2327
+ field army Q51977
2328
+ board of directors Q188628
2329
+ red beans Q489703
2330
+ silk Q37681
2331
+ electricity Q12725
2332
+ celery Q28298
2333
+ barrack Q131263
2334
+ radio waves Q4262
2335
+ car body Q15729598
2336
+ judgment Q3769186
2337
+ we media Q2908608
2338
+ phone line Q26359826
2339
+ poaching Q34577
2340
+ thin Q33686650
2341
+ diamond Q5283
2342
+ antlers Q834007
2343
+ peak Q207326
2344
+ sentence Q41796
2345
+ melon Q5881191
2346
+ forbidden city Q80290
2347
+ pass Q23069713
2348
+ glass Q11469
2349
+ national people's congress Q19211
2350
+ hot Q28128222
2351
+ headquarter Q7540126
2352
+ reduced yield Q40128349
2353
+ computer viruses Q47508768
2354
+ depression Q190429
2355
+ high temperature Q28128222
2356
+ backpack Q5843
2357
+ track Q160342
2358
+ gun Q110155210
2359
+ value Q194112
2360
+ turtle Q223044
2361
+ equator Q23538
2362
+ miserable Q20862847
2363
+ people Q2472587
2364
+ safe landing Q111870615
2365
+ gibbon Q185939
2366
+ hypertension Q95566669
2367
+ insect Q1390
2368
+ freighter Q2957747
2369
+ pest Q219174
2370
+ waves Q73006538
2371
+ vouchers Q59927255
2372
+ hospitalized Q69946132
2373
+ syringe Q273318
2374
+ laces Q56274905
2375
+ accent Q16001535
2376
+ tableware Q851782
2377
+ daydream Q6028924
2378
+ lines Q27910497
2379
+ transportation Q12162227
2380
+ gramm Q41803
2381
+ house Q3947
2382
+ juice Q8492
2383
+ mice Q83310
2384
+ alarm Q212758
2385
+ mamba Q194425
2386
+ stocks Q1484064
2387
+ notebook Q43013
2388
+ air pressure Q67389075
2389
+ rails Q190478
2390
+ characters Q1620031
2391
+ one day Q232161
2392
+ gasoline Q39558
2393
+ game Q11410
2394
+ biodegradation Q696715
2395
+ lollipop Q217446
2396
+ railcar Q752392
2397
+ whale Q1865281
2398
+ below Q15332388
2399
+ high mountain Q29572836
2400
+ logical fallacies Q37645474
2401
+ proofreading Q834191
2402
+ visual art Q36649
2403
+ bird Q5113
2404
+ feast Q132241
2405
+ body Q170494
2406
+ formaldehyde Q161210
2407
+ ivory Q82001
2408
+ rules Q1151067
2409
+ documentary Q4164344
2410
+ chronograph Q1088223
2411
+ moscow Q649
dataset/MARS/analogy_relations.txt ADDED
@@ -0,0 +1,27 @@
1
+ P361
2
+ A5
3
+ P828
4
+ A12
5
+ A10
6
+ A2
7
+ A7
8
+ A6
9
+ A16
10
+ P186
11
+ A0
12
+ A17
13
+ A1
14
+ A8
15
+ P31
16
+ A9
17
+ P1552
18
+ P366
19
+ A13
20
+ A3
21
+ A11
22
+ A18
23
+ P461
24
+ P1889
25
+ P276
26
+ A4
27
+ A15
dataset/MarKG/entity2text.txt ADDED
The diff for this file is too large to render. See raw diff
 
dataset/MarKG/entity2textlong.txt ADDED
The diff for this file is too large to render. See raw diff
 
dataset/MarKG/relation2text.txt ADDED
@@ -0,0 +1,192 @@
1
+ P276 location
2
+ P47 shares border with
3
+ P140 religion or worldview
4
+ P910 topic's main category
5
+ P279 subclass of
6
+ P101 field of work
7
+ P1889 different from
8
+ P155 follows
9
+ P1343 described by source
10
+ P31 instance of
11
+ P3095 practiced by
12
+ P2283 uses
13
+ P5008 on focus list of Wikimedia project
14
+ P2852 emergency phone number
15
+ P1056 product or material produced
16
+ P1792 category of associated people
17
+ P131 located in the administrative territorial entity
18
+ P2853 electrical plug type
19
+ P27 country of citizenship
20
+ P21 sex or gender
21
+ P2341 indigenous to
22
+ P460 said to be the same as
23
+ P366 has use
24
+ P159 headquarters location
25
+ P17 country
26
+ P1464 category for people born here
27
+ P527 has part(s)
28
+ P530 diplomatic relation
29
+ P1151 topic's main Wikimedia portal
30
+ P1433 published in
31
+ P112 founded by
32
+ P5125 Wikimedia outline
33
+ P1995 health specialty
34
+ P105 taxon rank
35
+ P30 continent
36
+ P106 occupation
37
+ P463 member of
38
+ P462 color
39
+ P495 country of origin
40
+ P136 genre
41
+ P361 part of
42
+ P282 writing system
43
+ P175 performer
44
+ P750 distributed by
45
+ P162 producer
46
+ P150 contains the administrative territorial entity
47
+ P206 located in or next to body of water
48
+ P2184 history of topic
49
+ P1552 has quality
50
+ P2079 fabrication method
51
+ P2579 studied by
52
+ P1412 languages spoken, written or signed
53
+ P1269 facet of
54
+ P364 original language of film or TV show
55
+ P1001 applies to jurisdiction
56
+ P921 main subject
57
+ P740 location of formation
58
+ P2578 studies
59
+ P306 operating system
60
+ P1435 heritage designation
61
+ P186 made from material
62
+ P6 head of government
63
+ P828 has cause
64
+ P6379 has works in the collection
65
+ P163 flag
66
+ P264 record label
67
+ P38 currency
68
+ P461 opposite of
69
+ P190 twinned administrative body
70
+ P1365 replaces
71
+ P86 composer
72
+ P8402 open data portal
73
+ P1441 present in work
74
+ P2936 language used
75
+ P610 highest point
76
+ P2670 has part(s) of the class
77
+ P103 native language
78
+ P1303 instrument
79
+ P166 award received
80
+ P8744 economy of topic
81
+ P1454 legal form
82
+ P6216 copyright status
83
+ P793 significant event
84
+ P2175 medical condition treated
85
+ P171 parent taxon
86
+ P437 distribution format
87
+ P734 family name
88
+ P1582 natural product of taxon
89
+ P2354 has list
90
+ P421 located in time zone
91
+ P452 industry
92
+ P36 capital
93
+ P407 language of work or name
94
+ P735 given name
95
+ P7763 copyright status as a creator
96
+ P1542 has effect
97
+ P1424 topic's main template
98
+ P122 basic form of government
99
+ P205 basin country
100
+ P37 official language
101
+ P710 participant
102
+ P69 educated at
103
+ P1382 partially coincident with
104
+ P1465 category for people who died here
105
+ P20 place of death
106
+ P703 found in taxon
107
+ P737 influenced by
108
+ P1196 manner of death
109
+ P6104 maintained by WikiProject
110
+ P5869 model item
111
+ P170 creator
112
+ P1535 used by
113
+ P138 named after
114
+ P19 place of birth
115
+ P156 followed by
116
+ P272 production company
117
+ P1479 has contributing factor
118
+ P1557 manifestation of
119
+ P129 physically interacts with
120
+ P127 owned by
121
+ P417 patron saint
122
+ P39 position held
123
+ P706 located in/on physical feature
124
+ P9241 demographics of topic
125
+ P5658 railway traffic side
126
+ P1622 driving side
127
+ P551 residence
128
+ P61 discoverer or inventor
129
+ P425 field of this occupation
130
+ P172 ethnic group
131
+ P1344 participant in
132
+ P6886 writing language
133
+ P1376 capital of
134
+ P1313 office held by head of government
135
+ P7867 category for maps
136
+ P832 public holiday
137
+ P57 director
138
+ P141 IUCN conservation status
139
+ P180 depicts
140
+ P344 director of photography
141
+ P2789 connects with
142
+ P509 cause of death
143
+ P517 interaction
144
+ P1366 replaced by
145
+ P2868 subject has role
146
+ P2596 culture
147
+ P1589 lowest point
148
+ P1672 this taxon is source of
149
+ P641 sport
150
+ P1791 category of people buried here
151
+ P400 platform
152
+ P8989 category for the view of the item
153
+ P937 work location
154
+ P111 measured physical quantity
155
+ P2633 geography of topic
156
+ P1830 owner of
157
+ P301 category's main topic
158
+ P35 head of state
159
+ P618 source of energy
160
+ P58 screenwriter
161
+ P4952 safety classification and labelling
162
+ P1740 category for films shot at this location
163
+ P108 employer
164
+ P840 narrative location
165
+ P121 item operated
166
+ P194 legislative body
167
+ P277 programming language
168
+ P161 cast member
169
+ P291 place of publication
170
+ P195 collection
171
+ P50 author
172
+ P85 anthem
173
+ P40 child
174
+ A0 synonym
175
+ A1 antonym
176
+ A2 prerequisite
177
+ A3 identical to
178
+ A4 juxtaposition to
179
+ A5 contradictory to
180
+ A6 intersection to
181
+ A7 probabilistic attribute
182
+ A8 metaphor
183
+ A9 takes place in
184
+ A10 tool of
185
+ A11 target of
186
+ A12 corresponds to
187
+ A13 follow
188
+ A14 sufficient to
189
+ A15 verb-object
190
+ A16 head-modifier
191
+ A17 subject-object
192
+ A18 subject-predicate
dataset/MarKG/relation2textlong.txt ADDED
@@ -0,0 +1,192 @@
1
+ P31 that class of which this subject is a particular example and member; different from P279 (subclass of); for example: K2 is an instance of mountain; volcano is a subclass of mountain (and an instance of volcanic landform)
2
+ P910 main Wikimedia category
3
+ P366 main use of the subject (includes current and former usage)
4
+ P5008 property to indicate that an item is of particular interest for a Wikimedia project. This property does not add notability. Items should not be created with this property if they are not notable for Wikidata. See also P6104, P972, P2354.
5
+ P1740 the object is a category for films shot at or in the subject
6
+ P47 countries or administrative subdivisions, of equal level, that this item borders, either by land or water. A single common point is enough.
7
+ P1151 Wikimedia portal associated with this topic
8
+ P937 location where persons or organisations were actively participating in employment, business or other work
9
+ P264 brand and trademark associated with the marketing of subject music recordings and music videos
10
+ P421 time zone for this item
11
+ P1552 the entity has an inherent or distinguishing non-material characteristic
12
+ P1343 work where this item is described
13
+ P1269 topic of which this item is an aspect, item that offers a broader perspective on the same topic
14
+ P37 language designated as official by this item
15
+ P361 object of which the subject is a part (if this subject is already part of object A which is a part of object B, then please only make the subject part of object A). Inverse property of "has part" (P527, see also "has parts of the class" (P2670)).
16
+ P530 diplomatic relations of the country
17
+ P527 part of this subject; inverse property of "part of" (P361). See also "has parts of the class" (P2670).
18
+ P106 occupation of a person; see also "field of work" (Property:P101), "position held" (Property:P39)
19
+ P6886 language in which the writer has written their work
20
+ P279 this item is a subclass (subset) of that item; all instances of these items are instances of those items; different from P31 (instance of), e.g.: K2 is an instance of mountain; volcano is a subclass of mountain (and an instance of volcanic landform).
21
+ P1622 side of the road that vehicles drive on in a given jurisdiction
22
+ P750 distributor of a creative work; distributor for a record label; news agency; film distributor
23
+ P27 the object is a country that recognizes the subject as its citizen
24
+ P180 entity visually depicted in an image, literarily described in a work, or otherwise incorporated into an audiovisual or other medium; see also P921, 'main subject'
25
+ P131 the item is located on the territory of the following administrative entity. Use P276 for specifying locations that are non-administrative places and for items about events. Use P1382 if the item falls only partially into the administrative entity.
26
+ P6379 collection that has works of this person or organisation (use archive location P485 for the archives)
27
+ P1424 the main template relating to a topic
28
+ P17 sovereign state of this item (not to be used for human beings)
29
+ P1672 links a taxon to natural products it produces. Note that it does not say "this taxon is the source of" or "this taxon is a source of" as this may vary. Some products may be yielded by more than one taxon.
30
+ P495 country of origin of this item (creative work, food, phrase, product, etc.)
31
+ P205 country that have drainage to/from or border the body of water
32
+ P2853 standard plug type for mains electricity in a country
33
+ P2670 the subject instance (the subject is not a class) has one or more parts of the object class
34
+ P190 twin towns, sister cities, twinned municipalities and other localities that have a partnership or cooperative agreement, either legally or informally acknowledged by their governments
35
+ P1412 language(s) that a person or a people speaks, writes or signs, including the native language(s)
36
+ P30 continent of which the subject is a part
37
+ P2341 place that a language, folk dance, cooking style, food, species or other cultural expression is found (or was originally found)
38
+ P1465 category item for people who died in this location
39
+ P1464 category item that groups people born in this place
40
+ P5658 indicates for a country or a railway line whether rail traffic usually runs on the left or right hand side
41
+ P1001 the item (institution, law, public office, public register...) or statement belongs to or has power over or applies to the value (a territorial jurisdiction: a country, state, municipality, ...)
42
+ P171 closest parent taxon of the taxon in question
43
+ P103 language or languages a person has learned from early childhood
44
+ P1542 effect of this item
45
+ P1382 object that is partially part of, but not fully part of (P361), the subject
46
+ P150 (list of) direct subdivisions of an administrative territorial entity
47
+ P1441 this (fictional or fictionalized) entity or person appears in that work as part of the narration (use P2860 for works citing other works, P361/P1433 for works being part of other works, P1343 for entities described in non-fictional accounts)
48
+ P1889 item that is different from another item, with which it may be confused
49
+ P832 official public holiday that occurs in this place in its honor, usually a non-working day
50
+ P136 creative work's genre or an artist's field of work (P101). Use main subject (P921) to relate creative works to their topic
51
+ P61 subject who discovered, first described, invented, or developed this discovery or invention
52
+ P156 immediately following item in a series of which the subject is a part, preferably use as qualifier of P179 [if the subject has been replaced, e.g. political offices, use "replaced by" (P1366)]
53
+ P20 most specific known (e.g. city instead of country, or hospital instead of city) death location of a person, animal or fictional character
54
+ P840 the narrative of the work is set in this location
55
+ P206 body of water on or next to which a place is located
56
+ P50 main creator(s) of a written work (use on works, not humans); use P2093 when Wikidata item is unknown or does not exist
57
+ P140 religion of a person, organization or religious building, or associated with this subject
58
+ P3095 type of agents that study this subject or work in this profession
59
+ P21 sex or gender identity of human or animal. For human: male, female, non-binary, intersex, transgender female, transgender male, agender. For animal: male organism, female organism. Groups of same gender use subclass of (P279)
60
+ P706 located on the specified (geo)physical feature. Should not be used when the value is only political/administrative (P131) or a mountain range (P4552).
61
+ P463 organization, club or musical group to which the subject belongs. Do not use for membership in ethnic or social groups, nor for holding a political position, such as a member of parliament (use P39 for that).
62
+ P2184 item about the historical development of an subject's topic, sample: "history of Argentina" for "Argentina". To list key events of the topic, use "significant event" (P793)
63
+ P19 most specific known (e.g. city instead of country, or hospital instead of city) birth location of a person, animal or fictional character
64
+ P509 underlying or immediate cause of death. Underlying cause (e.g. car accident, stomach cancer) preferred. Use 'manner of death' (P1196) for broadest category, e.g. natural causes, accident, homicide, suicide
65
+ P437 method (or type) of distribution for the subject
66
+ P122 subject's government
67
+ P2578 the object that an academic field studies; distinct from field of work (P101), which is used for human, organization, etc.
68
+ P166 award or recognition received by a person, organization or creative work
69
+ P7763 states if the body of work published during the lifetime of this creator is still copyrighted or in the public domain
70
+ P163 subject's flag
71
+ P6104 WikiProject that maintains this property, item, or linked pages. If the WikiProject is hosted outside of Wikidata, define the scope using the qualifier "of (P642)"
72
+ P170 maker of this creative work or other object (where no more specific property exists)
73
+ P105 level in a taxonomic hierarchy
74
+ P1303 musical instrument that a person plays or teaches or used in a music occupation
75
+ P111 value of a physical property expressed as number multiplied by a unit
76
+ P121 equipment, installation or service operated by the subject
77
+ P112 founder or co-founder of this organization, religion or place
78
+ P737 this person, idea, etc. is informed by that other person, idea, etc., e.g. “Heidegger was influenced by Aristotle”
79
+ P127 owner of the subject
80
+ P462 color of subject
81
+ P2936 language widely used (spoken or written) in this place or at this event
82
+ P2354 Wikimedia list related to this subject
83
+ P407 language associated with this creative work (such as books, shows, songs, broadcasts or websites) or a name (for persons use "native language" (P103) and "languages spoken, written or signed" (P1412))
84
+ P551 the place where the person is or has been, resident
85
+ P2579 subject is studied by this science or domain
86
+ P186 material the subject or the object is made of or derived from (do not confuse with P10672 which is used for processes)
87
+ P2852 telephone number to contact emergency services
88
+ P276 location of the object, structure or event. In the case of an administrative entity as containing item use P131. For statistical entities use P8138. In the case of a geographic entity use P706. Use P7153 for locations associated with the object.
89
+ P461 item that is the opposite of this item
90
+ P460 this item is said to be the same as that item, but it's uncertain or disputed
91
+ P1995 main specialty that diagnoses, prevent human illness, injury and other physical and mental impairments
92
+ P344 person responsible for the framing, lighting, and filtration of the subject work
93
+ P364 language in which a film or a performance work was originally created. Deprecated for written works and songs; use P407 ("language of work or name") instead.
94
+ P1792 Wikimedia category for people associated with this place or organization
95
+ P35 official with the highest formal authority in a country/state
96
+ P1454 legal form of an entity
97
+ P108 person or organization for which the subject works or worked
98
+ P175 actor, musician, band or other performer associated with this role or musical work
99
+ P5869 defines which item is a best practice example of modelling a subject, which is described by the value of this property, usage instructions at Wikidata:Model items
100
+ P703 the taxon in which the item can be found
101
+ P5125 item about an outline of the topic at Wikimedia sites
102
+ P9241 item that deals with demographics of the subject
103
+ P417 patron saint adopted by the subject
104
+ P57 director(s) of film, TV-series, stageplay, video game or similar
105
+ P1376 country, state, department, canton or other administrative division of which the municipality is the governmental seat
106
+ P85 subject's official anthem
107
+ P8989 category for the view of this item (object, place)
108
+ P272 company that produced this film, audio or performing arts work
109
+ P1589 point with lowest elevation in the country, region, city or area
110
+ P162 person(s) who produced the film, musical work, theatrical production, etc. (for film, this does not include executive producers, associate producers, etc.) [for production company, use P272, video games - use P178]
111
+ P1830 entities owned by the subject
112
+ P1535 item or concept that makes use of the subject (use sub-properties when appropriate)
113
+ P921 primary topic of a work (see also P180: depicts)
114
+ P7867 name of the Wikimedia category specifically for maps or plans of this item
115
+ P159 city, where an organization's headquarters is or has been situated. Use P276 qualifier for specific building
116
+ P161 actor in the subject production [use "character role" (P453) and/or "name of the character role" (P4633) as qualifiers] [use "voice actor" (P725) for voice-only role]
117
+ P6216 copyright status for intellectual creations like works of art, publications, software, etc.
118
+ P6 head of the executive power of this town, city, municipality, state, country, or other governmental body
119
+ P1479 thing that significantly influenced, but did not directly cause, this outcome or effect. Used in conjunction with 'has cause' and 'has immediate cause'. See '[[Help:Modeling causes]]'.
120
+ P828 underlying cause, thing that ultimately resulted in this effect
121
+ P301 primary topic of the subject Wikimedia category
122
+ P618 describes the source of energy an animated object (machine or animal) uses
123
+ P101 specialization of a person or organization; see P106 for the occupation
124
+ P1557 inherent and characteristic embodiment of a given concept
125
+ P793 significant or notable events associated with the subject
126
+ P2175 disease that this pharmaceutical drug, procedure, or therapy is used to treat
127
+ P452 specific industry of company or organization
128
+ P138 entity or event that inspired the subject's name, or namesake (in at least one language). Qualifier "applies to name" (P5168) can be used to indicate which one
129
+ P734 part of full name of person
130
+ P155 immediately prior item in a series of which the subject is a part, preferably use as qualifier of P179 [if the subject has replaced the preceding item, e.g. political offices, use "replaces" (P1365)]
131
+ P2283 item or concept used by the subject or in the operation (see also instrument [P1303] and armament [P520])
132
+ P1056 material or product produced by a government agency, business, industry, facility, or process
133
+ P400 platform for which a work was developed or released, or the specific platform version of a software product
134
+ P194 legislative body governing this entity; political institution with elected representatives, such as a parliament/legislature or council
135
+ P610 point with highest elevation in a region, or on the path of a race or route
136
+ P1582 links a natural product with its source (animal, plant, fungal, algal, etc.)
137
+ P8402 the item for the open data portal belonging to this entity
138
+ P1366 other person or item which continues the item by replacing it in its role. Use P156 ("followed by") if the item is not replaced nor identical, but adds to the series (e.g. books in a series).
139
+ P2079 method, process or technique used to grow, cook, weave, build, assemble, manufacture the item
140
+ P641 sport that the subject participates or participated in or is associated with
141
+ P517 subset of the four fundamental forces (strong (Q11415), electromagnetic (Q849919), weak (Q11418), and gravitation (Q11412) with which a particle interacts
142
+ P306 operating system (OS) on which a software works or the OS installed on hardware
143
+ P1365 person, state or item replaced. Use "structure replaces" (P1398) for structures. Use "follows" (P155) if the previous item was not replaced or predecessor and successor are identical
144
+ P86 person(s) who wrote the music [for lyricist, use "lyrics by" (P676)]
145
+ P740 location where a group or organization was formed
146
+ P36 seat of government of a country, province, state or other type of administrative territorial entity
147
+ P2868 role/generic identity of the item ("subject"), also in the context of a statement. For the role of the value of the statement ("object"), use P3831 ("object has role"). For acting roles, use P453 ("character role"). For persons, use P39.
148
+ P38 currency used by item
149
+ P425 field corresponding to this occupation or profession (use only for occupations/professions - for people use Property:P101, for companies use P452)
150
+ P710 person, group of people or organization (object) that actively takes/took part in an event or process (subject). Preferably qualify with "object has role" (P3831). Use P1923 for participants that are teams.
151
+ P39 subject currently or formerly holds the object position or public office
152
+ P172 subject's ethnicity (consensus is that a VERY high standard of proof is needed for this field to be used. In general this means 1) the subject claims it themselves, or 2) it is widely agreed on by scholars, or 3) is fictional and portrayed as such)
153
+ P282 alphabet, character set or other system of writing used by a language, supported by a typeface
154
+ P1435 heritage designation of a cultural or natural site
155
+ P141 conservation status assigned by the International Union for Conservation of Nature
156
+ P2596 human culture or people (or several cultures) associated with this item
157
+ P8744 item that deals with the economy of the subject
158
+ P69 educational institution attended by subject
159
+ P1344 event in which a person or organization was/is a participant; inverse of P710 or P1923
160
+ P1791 Wikimedia category for people with a burial site within this area
161
+ P195 art, museum, archival, or bibliographic collection the subject is part of
162
+ P1196 general circumstances of a person's death; e.g. natural causes, accident, suicide, homicide, etc. Use 'cause of death' (P509) for the specific physiological mechanism, e.g. heart attack, trauma, pneumonia...
163
+ P129 physical entity that the subject interacts with
164
+ P1313 political office that is fulfilled by the head of the government of this item
165
+ P291 geographical place of publication of the edition (use 1st edition when referring to works)
166
+ P2633 item that deals with the geography of the subject. Sample: "Rio de Janeiro" uses this property with value "geography of Rio de Janeiro" (Q10288853). For the location of a subject, use "location" (P276).
167
+ P2789 item with which the item is physically connected
168
+ P735 first name or another given name of this person; values used with the property should not link disambiguations nor family names
169
+ P4952 classification and labelling data for risk identification about chemicals
170
+ P40 subject has object as child. Do not use for stepchildren
171
+ P58 person(s) who wrote the script for subject item
172
+ P1433 larger work that a given work was published in, like a book, journal or music album
173
+ P277 the programming language(s) in which the software is developed
174
+ A0 Sense of another lexeme with the same meaning as this sense
175
+ A1 Sense of a lexeme with the opposite meaning to this sense
176
+ A2 Prior event or achievement that a person or team needs to complete before joining or obtaining the item topic
177
+ A3 The meanings of two terms are identical
178
+ A4 Two terms belong to the same hypernym or have the same properties or functions
179
+ A5 Two terms are contradictory to each other
180
+ A6 The extension of the two terms intersects
181
+ A7 One term is probably the attribute of the other
182
+ A8 A term is the metaphor of the other, reflecting something abstract indirectly
183
+ A9 A term takes place in the other
184
+ A10 One term is the tool of the other
185
+ A11 One term is the target of the other
186
+ A12 Terms generally correspond to each other
187
+ A13 The terms have a chronological or other sequential relationship, but one term does not cause the other
188
+ A14 One term is a sufficient condition for the other
189
+ A15 The action and the object on which the action acts
190
+ A16 The preceding term modifies the other
191
+ A17 The originator and receiver of an action
192
+ A18 The originator of the action and the action itself
dataset/MarKG/wiki_tuple_ids.txt ADDED
The diff for this file is too large to render. See raw diff
 
modeling_unimo.py ADDED
@@ -0,0 +1,976 @@
1
+ from typing import Any, Optional, Tuple
2
+ import math
3
+
4
+ import torch
5
+ from torch import nn, Tensor, device
6
+ from torch.nn import CrossEntropyLoss
7
+
8
+ from transformers.activations import ACT2FN
9
+ from transformers.modeling_utils import (
10
+ PreTrainedModel,
11
+ apply_chunking_to_forward,
12
+ )
13
+ from transformers.configuration_utils import PretrainedConfig
14
+ from transformers.modeling_outputs import (
15
+ BaseModelOutput,
16
+ MaskedLMOutput,
17
+ BaseModelOutputWithPooling,
18
+ )
19
+
20
+ # some function
21
+ def get_extended_attention_mask(attention_mask: Tensor, input_shape: Tuple[int], device: device) -> Tensor:
22
+ """
23
+ Makes broadcastable attention and causal masks so that future and masked tokens are ignored.
24
+
25
+ Arguments:
26
+ attention_mask (:obj:`torch.Tensor`):
27
+ Mask with ones indicating tokens to attend to, zeros for tokens to ignore.
28
+ input_shape (:obj:`Tuple[int]`):
29
+ The shape of the input to the model.
30
+ device: (:obj:`torch.device`):
31
+ The device of the input to the model.
32
+
33
+ Returns:
34
+ :obj:`torch.Tensor` The extended attention mask, with the same dtype as :obj:`attention_mask.dtype`.
35
+ """
36
+ # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
37
+ # ourselves in which case we just need to make it broadcastable to all heads.
38
+ if attention_mask.dim() == 3:
39
+ extended_attention_mask = attention_mask[:, None, :, :]
40
+ elif attention_mask.dim() == 2:
41
+ # Provided a padding mask of dimensions [batch_size, seq_length]
42
+ # - if the model is a decoder, apply a causal mask in addition to the padding mask
43
+ # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length]
44
+ extended_attention_mask = attention_mask[:, None, None, :]
45
+ else:
46
+ raise ValueError(
47
+ f"Wrong shape for input_ids (shape {input_shape}) or attention_mask (shape {attention_mask.shape})"
48
+ )
49
+
50
+ # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
51
+ # masked positions, this operation will create a tensor which is 0.0 for
52
+ # positions we want to attend and -10000.0 for masked positions.
53
+ # Since we are adding it to the raw scores before the softmax, this is
54
+ # effectively the same as removing these entirely.
55
+ extended_attention_mask = extended_attention_mask.to(dtype=torch.long) # fp16 compatibility
56
+ extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
57
+ return extended_attention_mask
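+ # e.g. a padding mask [[1, 1, 0]] becomes shape (1, 1, 1, 3) with values [[[[0., 0., -10000.]]]],
+ # so masked positions get ~zero weight after softmax once this is added to the raw attention scores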
58
+
59
+
60
+ def get_head_mask(
61
+ head_mask: Optional[Tensor], num_hidden_layers: int, is_attention_chunked: bool = False
62
+ ) -> Tensor:
63
+ """
64
+ Prepare the head mask if needed.
65
+
66
+ Args:
67
+ head_mask (:obj:`torch.Tensor` with shape :obj:`[num_heads]` or :obj:`[num_hidden_layers x num_heads]`, `optional`):
68
+ The mask indicating if we should keep the heads or not (1.0 for keep, 0.0 for discard).
69
+ num_hidden_layers (:obj:`int`):
70
+ The number of hidden layers in the model.
71
+ is_attention_chunked: (:obj:`bool`, `optional`, defaults to :obj:`False`):
72
+ Whether or not the attentions scores are computed by chunks or not.
73
+
74
+ Returns:
75
+ :obj:`torch.Tensor` with shape :obj:`[num_hidden_layers x batch x num_heads x seq_length x seq_length]` or
76
+ list with :obj:`[None]` for each layer.
77
+ """
78
+ head_mask = [None] * num_hidden_layers
79
+
80
+ return head_mask
81
+
82
+
83
+ # models
84
+ class UnimoConfig(PretrainedConfig):
85
+
86
+ def __init__(self, **kwargs):
87
+ super().__init__(**kwargs)
88
+
89
+
90
+ class UnimoPreTrainedModel(PreTrainedModel):
91
+ config_class = UnimoConfig
92
+ base_model_prefix = "clip"
93
+ supports_gradient_checkpointing = True
94
+ _keys_to_ignore_on_load_missing = [r"position_ids"]
95
+
96
+ def _init_weights(self, module):
97
+ pass
98
+
99
+
100
+ class CLIPVisionEmbeddings(nn.Module):
101
+ def __init__(self, config):
102
+ super().__init__()
103
+ self.config = config
104
+ self.embed_dim = config.hidden_size
105
+ self.image_size = config.image_size
106
+ self.patch_size = config.patch_size
107
+
108
+ self.class_embedding = nn.Parameter(torch.randn(self.embed_dim))
109
+
110
+ self.patch_embedding = nn.Conv2d(
111
+ in_channels=3, out_channels=self.embed_dim, kernel_size=self.patch_size, stride=self.patch_size, bias=False
112
+ )
113
+
114
+ self.num_patches = (self.image_size // self.patch_size) ** 2
115
+ self.num_positions = self.num_patches + 1
116
+ self.position_embedding = nn.Embedding(self.num_positions, self.embed_dim)
117
+ self.register_buffer("position_ids", torch.arange(self.num_positions).expand((1, -1)))
118
+
119
+ def forward(self, pixel_values):
120
+ # pixel_values: (bsz, 2, 3, 224, 224)
121
+ batch_size = pixel_values.shape[0]
122
+ patch_embeds = torch.cat([
123
+ self.patch_embedding(pixel_values[:, 0]).flatten(2).transpose(1, 2),
124
+ self.patch_embedding(pixel_values[:, 1]).flatten(2).transpose(1, 2)],
125
+ dim=1
126
+ ) # bsz, 98, 768
127
+ class_embeds = self.class_embedding.expand(batch_size, 1, -1)
128
+
129
+ embeddings = torch.cat([class_embeds, patch_embeds], dim=1)
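+ # the learned position embeddings are reused for the second image, skipping its class-token slot
+ # (index 0) so the length matches 1 + 2 * num_patches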
130
+ embeddings = embeddings + torch.cat([self.position_embedding(self.position_ids), self.position_embedding(self.position_ids)[:, 1:]], dim=1)
131
+
132
+ return embeddings
133
+
134
+
135
+ class BertEmbeddings(nn.Module):
136
+ """Construct the embeddings from word, position and token_type embeddings."""
137
+
138
+ def __init__(self, config):
139
+ super().__init__()
140
+ self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
141
+ self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
142
+ self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
143
+
144
+ # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
145
+ # any TensorFlow checkpoint file
146
+ self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
147
+ self.dropout = nn.Dropout(config.hidden_dropout_prob)
148
+ # position_ids (1, len position emb) is contiguous in memory and exported when serialized
149
+ self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
150
+ self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
151
+
152
+ def forward(
153
+ self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0
154
+ ):
155
+ if input_ids is not None:
156
+ input_shape = input_ids.size()
157
+ else:
158
+ input_shape = inputs_embeds.size()[:-1]
159
+
160
+ seq_length = input_shape[1]
161
+
162
+ if position_ids is None:
163
+ position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length]
164
+
165
+ # Setting the token_type_ids to the registered buffer in constructor where it is all zeros, which usually occurs
166
+ # when its auto-generated, registered buffer helps users when tracing the model without passing token_type_ids, solves
167
+ # issue #5664
168
+ if token_type_ids is None:
169
+ if hasattr(self, "token_type_ids"):
170
+ buffered_token_type_ids = self.token_type_ids[:, :seq_length]
171
+ buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
172
+ token_type_ids = buffered_token_type_ids_expanded
173
+ else:
174
+ token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)
175
+
176
+ if inputs_embeds is None:
177
+ inputs_embeds = self.word_embeddings(input_ids)
178
+ token_type_embeddings = self.token_type_embeddings(token_type_ids)
179
+
180
+ embeddings = inputs_embeds + token_type_embeddings
181
+ if self.position_embedding_type == "absolute":
182
+ position_embeddings = self.position_embeddings(position_ids)
183
+ embeddings += position_embeddings
184
+ embeddings = self.LayerNorm(embeddings)
185
+ embeddings = self.dropout(embeddings)
186
+ return embeddings
187
+
188
+
189
+ class CLIPAttention(nn.Module):
190
+ """Multi-headed attention from 'Attention Is All You Need' paper"""
191
+
192
+ def __init__(self, config):
193
+ super().__init__()
194
+ self.config = config
195
+ self.embed_dim = config.hidden_size
196
+ self.num_heads = config.num_attention_heads
197
+ self.head_dim = self.embed_dim // self.num_heads
198
+ assert (
199
+ self.head_dim * self.num_heads == self.embed_dim
200
+ ), f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`: {self.num_heads})."
201
+ self.scale = self.head_dim ** -0.5
202
+ self.dropout = config.attention_dropout
203
+
204
+ self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
205
+ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
206
+ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)
207
+ self.out_proj = nn.Linear(self.embed_dim, self.embed_dim)
208
+
209
+ def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
210
+ return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
211
+
212
+ def forward(
213
+ self,
214
+ hidden_states: torch.Tensor,
215
+ output_attentions: bool = False,
216
+ past_key_values: torch.Tensor = None,
217
+ ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
218
+ """Input shape: Batch x Time x Channel"""
219
+
220
+ bsz, tgt_len, embed_dim = hidden_states.size()
221
+
222
+ # get query proj
223
+ query_states = self.q_proj(hidden_states) * self.scale
224
+ key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
225
+ value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
226
+
227
+ if past_key_values is not None:
228
+ key_states = torch.cat([past_key_values[0], key_states], dim=2)
229
+ value_states = torch.cat([past_key_values[1], value_states], dim=2)
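+ # here past_key_values carry the text layer's keys/values, so image patches can also attend over text tokens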
230
+
231
+ proj_shape = (bsz * self.num_heads, -1, self.head_dim)
232
+ query_states = self._shape(query_states, tgt_len, bsz)
233
+
234
+ query_states = query_states.view(*proj_shape)
235
+ key_states = key_states.view(*proj_shape)
236
+ value_states = value_states.view(*proj_shape)
237
+
238
+ src_len = key_states.size(1)
239
+ attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
240
+
241
+ if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
242
+ raise ValueError(
243
+ f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"
244
+ )
245
+ attn_weights = nn.functional.softmax(attn_weights, dim=-1)
246
+
247
+ if output_attentions:
248
+ # this operation is a bit awkward, but it's required to
249
+ # make sure that attn_weights keeps its gradient.
250
+ # In order to do so, attn_weights have to be reshaped
251
+ # twice and have to be reused in the following
252
+ attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
253
+ attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
254
+ else:
255
+ attn_weights_reshaped = None
256
+
257
+ attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)
258
+
259
+ attn_output = torch.bmm(attn_probs, value_states)
260
+
261
+ if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
262
+ raise ValueError(
263
+ f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"
264
+ )
265
+
266
+ attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
267
+ attn_output = attn_output.transpose(1, 2)
268
+ attn_output = attn_output.reshape(bsz, tgt_len, embed_dim)
269
+
270
+ attn_output = self.out_proj(attn_output)
271
+
272
+ return attn_output, attn_weights_reshaped
273
+
274
+
275
+ class CLIPMLP(nn.Module):
276
+ def __init__(self, config):
277
+ super().__init__()
278
+ self.config = config
279
+ self.activation_fn = ACT2FN[config.hidden_act]
280
+ self.fc1 = nn.Linear(config.hidden_size, config.intermediate_size)
281
+ self.fc2 = nn.Linear(config.intermediate_size, config.hidden_size)
282
+
283
+ def forward(self, hidden_states):
284
+ hidden_states = self.fc1(hidden_states)
285
+ hidden_states = self.activation_fn(hidden_states)
286
+ hidden_states = self.fc2(hidden_states)
287
+ return hidden_states
288
+
289
+
290
+ class BertSelfAttention(nn.Module):
291
+ def __init__(self, config):
292
+ super().__init__()
293
+ self.num_attention_heads = config.num_attention_heads # 12
294
+ self.attention_head_size = int(config.hidden_size / config.num_attention_heads) # 64
295
+ self.all_head_size = self.num_attention_heads * self.attention_head_size # 768
296
+
297
+ self.query = nn.Linear(config.hidden_size, self.all_head_size)
298
+ self.key = nn.Linear(config.hidden_size, self.all_head_size)
299
+ self.value = nn.Linear(config.hidden_size, self.all_head_size)
300
+
301
+ self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
302
+ self.fusion = BertFusion(config) # text-to-vision fusion: aggregates visual hidden states for each text token
303
+
304
+ # # adaptive analogy mask
305
+ # self.adaptive_weight = nn.ParameterList([
306
+ # # nn.Parameter(torch.FloatTensor(1).uniform_(1.0, 2.5)), # example to query
307
+ # # nn.Parameter(torch.FloatTensor(1).uniform_(1.0, 2.5)) # query to example
308
+ # nn.Parameter(torch.FloatTensor(1).uniform_(0.0, 0.5)), # example to query
309
+ # nn.Parameter(torch.FloatTensor(1).uniform_(0.5, 0.5)) # query to example
310
+ # ])
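+ # note: with the adaptive analogy mask above commented out, the sep_idx argument is currently unused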
311
+
312
+ def transpose_for_scores(self, x):
313
+ new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
314
+ x = x.view(*new_x_shape)
315
+ return x.permute(0, 2, 1, 3)
316
+
317
+ def forward(
318
+ self,
319
+ hidden_states,
320
+ attention_mask=None,
321
+ head_mask=None,
322
+ output_attentions=False,
323
+ visual_hidden_state=None,
324
+ output_qks=None,
325
+ sep_idx=None
326
+ ):
327
+ mixed_query_layer = self.query(hidden_states)
328
+
329
+ # If this is instantiated as a cross-attention module, the keys
330
+ # and values come from an encoder; the attention mask needs to be
331
+ # such that the encoder's padding tokens are not attended to.
332
+ key_layer = self.transpose_for_scores(self.key(hidden_states))
333
+ value_layer = self.transpose_for_scores(self.value(hidden_states))
334
+ query_layer = self.transpose_for_scores(mixed_query_layer)
335
+
336
+ qks = (key_layer, value_layer) if output_qks else None
337
+
338
+ # Take the dot product between "query" and "key" to get the raw attention scores.
339
+ attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
340
+ attention_scores = attention_scores / math.sqrt(self.attention_head_size)
341
+
342
+ # if sep_idx is not None:
343
+ # for i, idx in enumerate(sep_idx):
344
+ # # example to answer
345
+ # # attention_scores[i, :, :idx[2], idx[2]:] = torch.sigmoid(self.adaptive_weight[0]) * attention_scores[i, :, :idx[2], idx[2]:].clone()
346
+ # attention_scores[i, :, :idx[2], idx[2]:] = torch.clamp(self.adaptive_weight[0], 0, 0.5) * attention_scores[i, :, :idx[2], idx[2]:].clone()
347
+ # # answer to example
348
+ # # attention_scores[i, :, idx[2]:, idx[2]:] = torch.sigmoid(self.adaptive_weight[1]) * attention_scores[i, :, idx[2]:, idx[2]:].clone()
349
+ # attention_scores[i, :, idx[2]:, idx[2]:] = torch.clamp(self.adaptive_weight[1], 0.5, 1) * attention_scores[i, :, idx[2]:, idx[2]:].clone()
350
+
351
+ if attention_mask is not None:
352
+ # Apply the attention mask is (precomputed for all layers in BertModel forward() function)
353
+ '''add adaptive analogy mask, attention_scores ~ (bsz, 12, seq_len, seq_len), attention_mask ~ (bsz, 1, seq_len, seq_len)'''
354
+
355
+ attention_scores = attention_scores + attention_mask
356
+
357
+ # Normalize the attention scores to probabilities.
358
+ attention_probs = nn.Softmax(dim=-1)(attention_scores)
359
+
360
+ # This is actually dropping out entire tokens to attend to, which might
361
+ # seem a bit unusual, but is taken from the original Transformer paper.
362
+ attention_probs = self.dropout(attention_probs)
363
+
364
+ # Mask heads if we want to
365
+ if head_mask is not None:
366
+ attention_probs = attention_probs * head_mask
367
+ context_layer = torch.matmul(attention_probs, value_layer)
368
+
369
+ context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
370
+ new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
371
+ context_layer = context_layer.view(*new_context_layer_shape) # bsz, 128, 768
372
+
373
+ fusion_output = self.fusion(context_layer, visual_hidden_state) if visual_hidden_state is not None else None # fuse visual features into the text stream when vision hidden states are provided
374
+
375
+ outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
376
+
377
+ return outputs, fusion_output, qks
378
+
379
+
380
+ class BertSelfOutput(nn.Module):
381
+ def __init__(self, config):
382
+ super().__init__()
383
+ self.dense = nn.Linear(config.hidden_size, config.hidden_size)
384
+ self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
385
+ self.dropout = nn.Dropout(config.hidden_dropout_prob)
386
+
387
+ def forward(self, hidden_states, input_tensor):
388
+ hidden_states = self.dense(hidden_states)
389
+ hidden_states = self.dropout(hidden_states)
390
+ hidden_states = self.LayerNorm(hidden_states + input_tensor)
391
+ return hidden_states
392
+
393
+
394
+ class BertFusion(nn.Module):
395
+ def __init__(self, config):
396
+ super().__init__()
397
+ # self.fusion_function = config.fusion_function
398
+ self.fusion_function = 'softmax'
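+ # hard-coded to 'softmax'; the 'max' branch in forward() is never taken (and would leave fusion_output undefined)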
399
+
400
+ def forward(
401
+ self,
402
+ hidden_states,
403
+ visual_hidden_state=None,
404
+ ):
405
+ fusion_scores = torch.matmul(hidden_states, visual_hidden_state.transpose(-1, -2)) # bsz, 128, 49
406
+ # if attention_mask is not None:
407
+ # # attention_mask: bsz, 1, 1, 128; fusion_scores: bsz, 128, 49
408
+ # fusion_scores = fusion_scores + attention_mask.squeeze(1).transpose(1, 2)
409
+ if self.fusion_function == 'softmax':
410
+ fusion_probs = nn.Softmax(dim=-1)(fusion_scores)
411
+ fusion_output = torch.matmul(fusion_probs, visual_hidden_state)
412
+ elif self.fusion_function == 'max':
413
+ fusion_probs = fusion_scores.max(dim=-1)
414
+ return fusion_output
415
+
416
+
417
+ class BertAttention(nn.Module):
418
+ def __init__(self, config):
419
+ super().__init__()
420
+ self.self = BertSelfAttention(config)
421
+ self.output = BertSelfOutput(config)
422
+ self.pruned_heads = set()
423
+
424
+ def forward(
425
+ self,
426
+ hidden_states,
427
+ attention_mask=None,
428
+ head_mask=None,
429
+ output_attentions=False,
430
+ visual_hidden_state=None,
431
+ output_qks=None,
432
+ sep_idx=None,
433
+ ):
434
+ self_outputs, fusion_output, qks = self.self(
435
+ hidden_states,
436
+ attention_mask,
437
+ head_mask,
438
+ output_attentions,
439
+ visual_hidden_state,
440
+ output_qks,
441
+ sep_idx
442
+ )
443
+ attention_output = self.output(self_outputs[0], hidden_states)
444
+ outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
445
+ return outputs, fusion_output, qks
446
+
447
+
448
+ class BertIntermediate(nn.Module):
449
+ def __init__(self, config):
450
+ super().__init__()
451
+ self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
452
+ self.fusion_dense = nn.Linear(config.hidden_size, config.intermediate_size)
453
+ if isinstance(config.hidden_act, str):
454
+ self.intermediate_act_fn = ACT2FN[config.hidden_act]
455
+ else:
456
+ self.intermediate_act_fn = config.hidden_act
457
+
458
+ def forward(self, hidden_states, fusion_output=None):
459
+ hidden_states = self.dense(hidden_states)
460
+ if fusion_output is not None:
461
+ fusion_states = self.fusion_dense(fusion_output)
462
+ hidden_states = hidden_states + fusion_states
463
+ hidden_states = self.intermediate_act_fn(hidden_states)
464
+ return hidden_states
465
+
466
+
467
+ class BertOutput(nn.Module):
468
+ def __init__(self, config):
469
+ super().__init__()
470
+ self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
471
+ self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
472
+ self.dropout = nn.Dropout(config.hidden_dropout_prob)
473
+
474
+ def forward(self, hidden_states, input_tensor):
475
+ hidden_states = self.dense(hidden_states)
476
+ hidden_states = self.dropout(hidden_states)
477
+ hidden_states = self.LayerNorm(hidden_states + input_tensor)
478
+ return hidden_states
479
+
480
+
481
+ class CLIPEncoderLayer(nn.Module):
482
+ def __init__(self, config):
483
+ super().__init__()
484
+ self.embed_dim = config.hidden_size
485
+ self.self_attn = CLIPAttention(config)
486
+ self.layer_norm1 = nn.LayerNorm(self.embed_dim)
487
+ self.mlp = CLIPMLP(config)
488
+ self.layer_norm2 = nn.LayerNorm(self.embed_dim)
489
+
490
+ def forward(
491
+ self,
492
+ hidden_states: torch.Tensor,
493
+ output_attentions: bool = False,
494
+ past_key_values: torch.Tensor = None,
495
+ ):
496
+ """
497
+ Args:
498
+ hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`
499
+ attention_mask (:obj:`torch.FloatTensor`): attention mask of size
500
+ :obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
501
+ layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
502
+ :obj:`(config.encoder_attention_heads,)`.
503
+ output_attentions (:obj:`bool`, `optional`):
504
+ Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
505
+ returned tensors for more detail.
506
+ """
507
+ residual = hidden_states
508
+
509
+ hidden_states = self.layer_norm1(hidden_states)
510
+ hidden_states, attn_weights = self.self_attn(
511
+ hidden_states=hidden_states,
512
+ output_attentions=output_attentions,
513
+ past_key_values=past_key_values,
514
+ )
515
+ hidden_states = residual + hidden_states
516
+
517
+ residual = hidden_states
518
+ hidden_states = self.layer_norm2(hidden_states)
519
+ hidden_states = self.mlp(hidden_states)
520
+ hidden_states = residual + hidden_states
521
+
522
+ outputs = (hidden_states,)
523
+
524
+ if output_attentions:
525
+ outputs += (attn_weights,)
526
+
527
+ return outputs
528
+
529
+
530
+ class BertLayer(nn.Module):
531
+ def __init__(self, config):
532
+ super().__init__()
533
+ self.chunk_size_feed_forward = config.chunk_size_feed_forward
534
+ self.seq_len_dim = 1
535
+ self.attention = BertAttention(config)
536
+ self.add_cross_attention = config.add_cross_attention
537
+ self.intermediate = BertIntermediate(config)
538
+ self.output = BertOutput(config)
539
+
540
+ def forward(
541
+ self,
542
+ hidden_states,
543
+ attention_mask=None,
544
+ head_mask=None,
545
+ output_attentions=False,
546
+ visual_hidden_state=None,
547
+ output_qks=None,
548
+ sep_idx=None,
549
+ ):
550
+ # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
551
+ # self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
552
+ self_attention_outputs, fusion_output, qks = self.attention(
553
+ hidden_states,
554
+ attention_mask,
555
+ head_mask,
556
+ output_attentions=output_attentions,
557
+ visual_hidden_state=visual_hidden_state,
558
+ output_qks=output_qks,
559
+ sep_idx=sep_idx,
560
+ )
561
+ attention_output = self_attention_outputs[0]
562
+
563
+ outputs = self_attention_outputs[1:] # add self attentions if we output attention weights
564
+
565
+ layer_output = apply_chunking_to_forward(
566
+ self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output, fusion_output
567
+ )
568
+ outputs = (layer_output,) + outputs
569
+ if output_qks:
570
+ outputs += (qks,)
571
+
572
+ return outputs
573
+
574
+ def feed_forward_chunk(self, attention_output, fusion_output):
575
+ intermediate_output = self.intermediate(attention_output, fusion_output)
576
+ layer_output = self.output(intermediate_output, attention_output)
577
+ return layer_output
578
+
579
+
580
+ class UnimoEncoder(nn.Module):
581
+ def __init__(self, vision_config, text_config):
582
+ super().__init__()
583
+ self.vision_config = vision_config
584
+ self.text_config = text_config
585
+
586
+ self.vision_layers = nn.ModuleList([CLIPEncoderLayer(vision_config) for _ in range(vision_config.num_hidden_layers)])
587
+ self.text_layer = nn.ModuleList([BertLayer(text_config) for _ in range(text_config.num_hidden_layers)])
588
+
589
+ def forward(
590
+ self,
591
+ vision_embeds=None,
592
+ text_embeds=None,
593
+ attention_mask=None,
594
+ head_mask=None,
595
+ output_attentions=None,
596
+ output_hidden_states=None,
597
+ return_dict=None,
598
+ sep_idx=None,
599
+ ):
600
+ assert self.vision_config.num_hidden_layers == self.text_config.num_hidden_layers
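+ # both stacks run layer-by-layer in lockstep; from layer index 8 onward (layers 9-12, 1-based)
+ # the text layer's keys/values are passed to the vision layer as past_key_values, and the vision
+ # hidden states are passed back into the text layer for fusion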
601
+
602
+ all_vision_hidden_states = () if output_hidden_states else None
603
+ all_text_hidden_states = () if output_hidden_states else None
604
+ all_vision_attentions = () if output_attentions else None
605
+ all_text_attentions = () if output_attentions else None
606
+
607
+ vision_hidden_states = vision_embeds
608
+ text_hidden_states = text_embeds
609
+ for idx in range(self.vision_config.num_hidden_layers):
610
+ if output_hidden_states:
611
+ all_vision_hidden_states = all_vision_hidden_states + (vision_hidden_states, )
612
+ all_text_hidden_states = all_text_hidden_states + (text_hidden_states, )
613
+
614
+ # vision
615
+ # layers 9-12: pass the text keys/values (pkv) to the vision layer
616
+ past_key_values = text_layer_output[-1] if idx >= 8 else None
617
+ vision_layer_module = self.vision_layers[idx]
618
+ vision_layer_output = vision_layer_module(
619
+ vision_hidden_states,
620
+ output_attentions=output_attentions,
621
+ past_key_values=past_key_values,
622
+ )
623
+ vision_hidden_states = vision_layer_output[0]
624
+
625
+ # text
626
+ # TODO: 9-12 layers past vison qks to text
627
+ last_hidden_state = vision_hidden_states if idx >= 8 else None
628
+ output_qks = True if idx >= 7 else None
629
+ layer_head_mask = head_mask[idx] if head_mask is not None else None
630
+ text_layer_module = self.text_layer[idx]
631
+ text_layer_output = text_layer_module(
632
+ text_hidden_states,
633
+ attention_mask=attention_mask,
634
+ head_mask=layer_head_mask,
635
+ visual_hidden_state=last_hidden_state,
636
+ output_attentions=output_attentions,
637
+ output_qks=output_qks,
638
+ sep_idx=sep_idx,
639
+ )
640
+ text_hidden_states = text_layer_output[0]
641
+ if output_attentions:
642
+ all_vision_attentions = all_vision_attentions + (vision_layer_output[1], )
643
+ all_text_attentions = all_text_attentions + (text_layer_output[1], )
644
+
645
+ if output_hidden_states:
646
+ all_vision_hidden_states = all_vision_hidden_states + (vision_hidden_states, )
647
+ all_text_hidden_states = all_text_hidden_states + (text_hidden_states, )
648
+
649
+ if not return_dict:
650
+ return tuple(
651
+ v for v in [
652
+ text_hidden_states,
653
+ all_text_hidden_states,
654
+ all_text_attentions,
655
+ ] if v is not None)
656
+ return BaseModelOutput(
657
+ last_hidden_state=text_hidden_states, hidden_states=all_text_hidden_states, attentions=all_text_attentions
658
+ )
659
+
660
+
661
+ class BertPooler(nn.Module):
662
+ def __init__(self, config):
663
+ super().__init__()
664
+ self.dense = nn.Linear(config.hidden_size, config.hidden_size)
665
+ self.activation = nn.Tanh()
666
+
667
+ def forward(self, hidden_states):
668
+ # We "pool" the model by simply taking the hidden state corresponding
669
+ # to the first token.
670
+ first_token_tensor = hidden_states[:, 0]
671
+ pooled_output = self.dense(first_token_tensor)
672
+ pooled_output = self.activation(pooled_output)
673
+ return pooled_output
674
+
675
+
676
+ class UnimoModel(nn.Module):
677
+ def __init__(self, vision_config, text_config, add_pooling_layer=True):
678
+ super(UnimoModel, self).__init__()
679
+ # vision model
680
+ self.vision_config = vision_config
681
+ self.vision_embeddings = CLIPVisionEmbeddings(vision_config)
682
+ self.vision_pre_layrnorm = nn.LayerNorm(vision_config.hidden_size)
683
+ self.vision_post_layernorm = nn.LayerNorm(vision_config.hidden_size)
684
+
685
+ # text model
686
+ self.text_config = text_config
687
+ self.text_embeddings = BertEmbeddings(text_config)
688
+ self.text_pooler = BertPooler(text_config) if add_pooling_layer else None
689
+
690
+ # all
691
+ self.encoder = UnimoEncoder(vision_config, text_config)
692
+
693
+ self.device = vision_config.device
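+ # assumes the caller attaches a device attribute to vision_config (not a standard CLIPVisionConfig field)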
694
+
695
+ def forward(
696
+ self,
697
+ input_ids=None,
698
+ attention_mask=None,
699
+ token_type_ids=None,
700
+ position_ids=None,
701
+ head_mask=None,
702
+ sep_idx=None,
703
+
704
+ pixel_values=None,
705
+ output_attentions=None,
706
+ output_hidden_states=None,
707
+ return_dict=None,
708
+ ):
709
+ # pre vision
710
+ vision_embedding_output = self.vision_embeddings(pixel_values)
711
+ vision_embedding_output = self.vision_pre_layrnorm(vision_embedding_output)
712
+
713
+ # pre text
714
+ input_shape = input_ids.size()
715
+ batch_size, seq_length = input_shape
716
+ device = input_ids.device
717
+ if attention_mask is None:
718
+ attention_mask = torch.ones(((batch_size, seq_length)), device=device)
719
+ if token_type_ids is None:
720
+ if hasattr(self.text_embeddings, "token_type_ids"):
721
+ buffered_token_type_ids = self.text_embeddings.token_type_ids[:, :seq_length]
722
+ buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
723
+ token_type_ids = buffered_token_type_ids_expanded
724
+ else:
725
+ token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
726
+
727
+
728
+ extended_attention_mask: torch.Tensor = get_extended_attention_mask(attention_mask, input_shape, device)
729
+ head_mask = get_head_mask(head_mask, self.text_config.num_hidden_layers) # [None]*12
730
+
731
+ text_embedding_output = self.text_embeddings(
732
+ input_ids=input_ids,
733
+ position_ids=position_ids,
734
+ token_type_ids=token_type_ids,
735
+ )
736
+
737
+ # all encoder
738
+ encoder_outputs = self.encoder(
739
+ vision_embeds=vision_embedding_output,
740
+ text_embeds=text_embedding_output,
741
+ attention_mask=extended_attention_mask,
742
+ output_attentions=output_attentions,
743
+ output_hidden_states=output_hidden_states,
744
+ return_dict=return_dict,
745
+ sep_idx=sep_idx,
746
+ )
747
+ sequence_output = encoder_outputs[0]
748
+ pooled_output = self.text_pooler(sequence_output) if self.text_pooler is not None else None
749
+
750
+ if not return_dict:
751
+ return (sequence_output, pooled_output) + encoder_outputs[1:]
752
+
753
+ return BaseModelOutputWithPooling(
754
+ last_hidden_state=sequence_output,
755
+ pooler_output=pooled_output,
756
+ hidden_states=encoder_outputs.hidden_states,
757
+ attentions=encoder_outputs.attentions,
758
+ )
759
+
760
+ def _init_text_weights(self, module):
761
+ """Initialize the weights"""
762
+ if isinstance(module, nn.Linear):
763
+ # Slightly different from the TF version which uses truncated_normal for initialization
764
+ # cf https://github.com/pytorch/pytorch/pull/5617
765
+ module.weight.data.normal_(mean=0.0, std=self.text_config.initializer_range)
766
+ if module.bias is not None:
767
+ module.bias.data.zero_()
768
+ elif isinstance(module, nn.Embedding):
769
+ module.weight.data.normal_(mean=0.0, std=self.text_config.initializer_range)
770
+ if module.padding_idx is not None:
771
+ module.weight.data[module.padding_idx].zero_()
772
+ elif isinstance(module, nn.LayerNorm):
773
+ module.bias.data.zero_()
774
+ module.weight.data.fill_(1.0)
775
+
776
+ def get_input_embeddings(self):
777
+ return self.text_embeddings.word_embeddings
778
+
779
+ def set_input_embeddings(self, value):
780
+ self.text_embeddings.word_embeddings = value
781
+
782
+ def resize_token_embeddings(self, new_num_tokens):
783
+ old_embeddings = self.get_input_embeddings()
784
+ new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens)
785
+ self.set_input_embeddings(new_embeddings)
786
+
787
+ def _get_resized_embeddings(
788
+ self, old_embeddings: nn.Embedding, new_num_tokens: Optional[int] = None
789
+ ) -> nn.Embedding:
790
+ """
791
+ Build a resized Embedding Module from a provided token Embedding Module. Increasing the size will add newly
792
+ initialized vectors at the end. Reducing the size will remove vectors from the end
793
+
794
+ Args:
795
+ old_embeddings (:obj:`torch.nn.Embedding`):
796
+ Old embeddings to be resized.
797
+ new_num_tokens (:obj:`int`, `optional`):
798
+ New number of tokens in the embedding matrix.
799
+
800
+ Increasing the size will add newly initialized vectors at the end. Reducing the size will remove
801
+ vectors from the end. If not provided or :obj:`None`, just returns a pointer to the input tokens
802
+ :obj:`torch.nn.Embedding` module of the model without doing anything.
803
+
804
+ Return:
805
+ :obj:`torch.nn.Embedding`: Pointer to the resized Embedding Module or the old Embedding Module if
806
+ :obj:`new_num_tokens` is :obj:`None`
807
+ """
808
+ if new_num_tokens is None:
809
+ return old_embeddings
810
+ else:
811
+ old_num_tokens, old_embedding_dim = old_embeddings.weight.size()
812
+
813
+ if old_num_tokens == new_num_tokens:
814
+ return old_embeddings
815
+
816
+ if not isinstance(old_embeddings, nn.Embedding):
817
+ raise TypeError(
818
+ f"Old embeddings are of type {type(old_embeddings)}, which is not an instance of {nn.Embedding}."
819
+ f"You should either use a different resize function or make sure that `old_embeddings` are an instance of {nn.Embedding}."
820
+ )
821
+
822
+ # Build new embeddings
823
+ new_embeddings = nn.Embedding(new_num_tokens, old_embedding_dim).to(
824
+ self.device, dtype=old_embeddings.weight.dtype
825
+ )
826
+
827
+ # initialize all new embeddings (in particular added tokens)
828
+ self._init_text_weights(new_embeddings)
829
+
830
+ # Copy token embeddings from the previous weights
831
+
832
+ # numbers of tokens to copy
833
+ n = min(old_num_tokens, new_num_tokens)
834
+ new_embeddings.weight.data[:n, :] = old_embeddings.weight.data[:n, :]
835
+
836
+ return new_embeddings
837
+
838
+
839
+ class UnimoForMaskedLM(nn.Module):
840
+ def __init__(self, vision_config, text_config):
841
+ super().__init__()
842
+ self.unimo = UnimoModel(vision_config, text_config)
843
+ self.cls = UnimoOnlyMLMHead(text_config)
844
+ self.config = text_config
845
+
846
+ self.tie_weights()
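+ # BERT-style weight tying: the MLM decoder shares its weight matrix with the input word embeddings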
847
+
848
+ def forward(
849
+ self,
850
+ input_ids=None,
851
+ attention_mask=None,
852
+ token_type_ids=None,
853
+ position_ids=None,
854
+ head_mask=None,
855
+ sep_idx=None,
856
+
857
+ pixel_values=None,
858
+ output_attentions=None,
859
+ output_hidden_states=None,
860
+ return_dict=None,
861
+ labels=None,
862
+ ):
863
+ outputs = self.unimo(
864
+ input_ids,
865
+ attention_mask=attention_mask,
866
+ token_type_ids=token_type_ids,
867
+ position_ids=position_ids,
868
+ head_mask=head_mask,
869
+ sep_idx=sep_idx,
870
+ pixel_values=pixel_values,
871
+ output_attentions=output_attentions,
872
+ output_hidden_states=output_hidden_states,
873
+ return_dict=return_dict,
874
+ )
875
+
876
+ sequence_output = outputs[0]
877
+ prediction_scores, trans_hidden_states = self.cls(sequence_output)
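+ # trans_hidden_states are the pre-decoder transformed states; they are returned alongside the
+ # MaskedLMOutput below, presumably so callers can score them against entity embeddings directly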
878
+
879
+ masked_lm_loss = None
880
+ if labels is not None:
881
+ loss_fct = CrossEntropyLoss() # -100 index = padding token
882
+ masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
883
+
884
+ if not return_dict:
885
+ output = (prediction_scores,) + outputs[2:]
886
+ return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
887
+
888
+ return MaskedLMOutput(
889
+ loss=masked_lm_loss,
890
+ logits=prediction_scores,
891
+ hidden_states=outputs.hidden_states,
892
+ attentions=outputs.attentions,
893
+ ), trans_hidden_states
894
+
895
+ def get_input_embeddings(self):
896
+ return self.unimo.text_embeddings.word_embeddings
897
+
898
+ def get_output_embeddings(self):
899
+ return self.cls.predictions.decoder
900
+
901
+ def set_output_embeddings(self, new_embeddings):
902
+ self.cls.predictions.decoder = new_embeddings
903
+
904
+ def tie_weights(self):
905
+ output_embeddings = self.get_output_embeddings()
906
+ self._tie_or_clone_weights(output_embeddings, self.unimo.get_input_embeddings())
907
+
908
+ def _tie_or_clone_weights(self, output_embeddings, input_embeddings):
909
+ """Tie or clone module weights depending on whether we are using TorchScript or not"""
910
+ if self.config.torchscript:
911
+ output_embeddings.weight = nn.Parameter(input_embeddings.weight.clone())
912
+ else:
913
+ output_embeddings.weight = input_embeddings.weight
914
+
915
+ if getattr(output_embeddings, "bias", None) is not None:
916
+ output_embeddings.bias.data = nn.functional.pad(
917
+ output_embeddings.bias.data,
918
+ (
919
+ 0,
920
+ output_embeddings.weight.shape[0] - output_embeddings.bias.shape[0],
921
+ ),
922
+ "constant",
923
+ 0,
924
+ )
925
+ if hasattr(output_embeddings, "out_features") and hasattr(input_embeddings, "num_embeddings"):
926
+ output_embeddings.out_features = input_embeddings.num_embeddings
927
+
928
+ def resize_token_embeddings(self, new_num_tokens):
929
+ self.unimo.resize_token_embeddings(new_num_tokens)
930
+ self.tie_weights()
931
+
932
+ class UnimoOnlyMLMHead(nn.Module):
933
+ def __init__(self, config):
934
+ super().__init__()
935
+ self.predictions = UnimoLMPredictionHead(config)
936
+
937
+ def forward(self, sequence_output):
938
+ prediction_scores, trans_hidden_states = self.predictions(sequence_output)
939
+ return prediction_scores, trans_hidden_states
940
+
941
+
942
+ class UnimoLMPredictionHead(nn.Module):
943
+ def __init__(self, config):
944
+ super().__init__()
945
+ self.transform = BertPredictionHeadTransform(config)
946
+
947
+ # The output weights are the same as the input embeddings, but there is
948
+ # an output-only bias for each token.
949
+ self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
950
+
951
+ self.bias = nn.Parameter(torch.zeros(config.vocab_size))
952
+
953
+ # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings`
954
+ self.decoder.bias = self.bias
955
+
956
+ def forward(self, hidden_states):
957
+ trans_hidden_states = self.transform(hidden_states)
958
+ hidden_states = self.decoder(trans_hidden_states)
959
+ return hidden_states, trans_hidden_states
960
+
961
+
962
+ class BertPredictionHeadTransform(nn.Module):
963
+ def __init__(self, config):
964
+ super().__init__()
965
+ self.dense = nn.Linear(config.hidden_size, config.hidden_size)
966
+ if isinstance(config.hidden_act, str):
967
+ self.transform_act_fn = ACT2FN[config.hidden_act]
968
+ else:
969
+ self.transform_act_fn = config.hidden_act
970
+ self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
971
+
972
+ def forward(self, hidden_states):
973
+ hidden_states = self.dense(hidden_states)
974
+ hidden_states = self.transform_act_fn(hidden_states)
975
+ hidden_states = self.LayerNorm(hidden_states)
976
+ return hidden_states