yangheng committed
Commit a9371e2
1 Parent(s): 28c5195
.gitignore CHANGED
@@ -32,6 +32,7 @@ glove.840B.300d.txt
 glove.42B.300d.txt
 glove.twitter.27B.txt
 *CHECKPOINT/
+checkpoints.json
 
 # project main files
 release_note.json
app.py CHANGED
@@ -22,6 +22,7 @@ from pyabsa.utils.data_utils.dataset_manager import detect_infer_dataset
 
 download_all_available_datasets()
 
+
 def get_atepc_example(dataset):
     task = TaskCodeOption.Aspect_Polarity_Classification
     dataset_file = detect_infer_dataset(atepc_dataset_items[dataset], task)
@@ -83,6 +84,7 @@ def get_acos_example(dataset):
 
 try:
     from pyabsa import AspectTermExtraction as ATEPC
+
     atepc_dataset_items = {dataset.name: dataset for dataset in ATEPC.ATEPCDatasetList()}
     atepc_dataset_dict = {
         dataset.name: get_atepc_example(dataset.name)
@@ -112,11 +114,11 @@ except Exception as e:
 try:
     from pyabsa import ABSAInstruction
 
-    acos_dataset_items = {dataset.name: dataset for dataset in ABSAInstruction.ACOSDatasetList()[:-1]}
+    acos_dataset_items = {dataset.name: dataset for dataset in ABSAInstruction.ACOSDatasetList()}
     acos_dataset_dict = {
-        dataset.name: get_acos_example(dataset.name) for dataset in ABSAInstruction.ACOSDatasetList()[:-1]
+        dataset.name: get_acos_example(dataset.name) for dataset in ABSAInstruction.ACOSDatasetList()
     }
-    quadruple_extractor = ABSAInstruction.ABSAGenerator(checkpoint="multilingual", device=autocuda.auto_cuda())
+    quadruple_extractor = ABSAInstruction.ABSAGenerator("multilingual")
 except Exception as e:
     print(e)
     acos_dataset_items = {}
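This hunk also swaps the explicit `checkpoint=`/`device=` keyword arguments for a positional checkpoint name, presumably falling back to the library's own device selection. A minimal standalone sketch of the same load-and-predict pattern, assuming `pyabsa>=2.1.12` (as pinned in `requirements.txt` below) and network access to fetch the checkpoint:

```python
# Sketch of the generator usage this commit moves to; the checkpoint name,
# max_length argument, and 'Quadruples' key mirror calls visible in this diff.
from pyabsa import ABSAInstruction

generator = ABSAInstruction.ABSAGenerator("multilingual")

output = generator.predict(
    "The food was great but the service was slow.", max_length=128
)
print(output["Quadruples"])  # list of extracted quadruple records
```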
@@ -154,7 +156,7 @@ def perform_aste_inference(text, dataset):
 
     pred_triplets = pd.DataFrame(result["Triplets"])
     true_triplets = pd.DataFrame(result["True Triplets"])
-    return pred_triplets, true_triplets, "{}".format(text)
+    return pred_triplets, true_triplets, "{}".format(text.split('####')[0])
 
 
 def perform_acos_inference(text, dataset):
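Here and in `perform_acos_inference` below, `text.split('####')[0]` keeps only the sentence portion of a dataset example, since example lines append gold annotations after a `####` delimiter. A small illustration; the example line is invented:

```python
# Invented dataset line for illustration: sentence '####' gold labels.
example = "The battery life is great####[('battery life', 'POSITIVE')]"

sentence = example.split('####')[0]
print(sentence)  # -> The battery life is great
```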
@@ -163,33 +165,42 @@ def perform_acos_inference(text, dataset):
         random.randint(0, len(acos_dataset_dict[dataset]) - 1)
     ]
 
-    raw_output = quadruple_extractor.predict(text)
-    outputs = raw_output[0].strip().split(', ')
-    data = {}
-    for output in outputs:
-        for sub_output in output.split('|'):
-            if 'aspect' in sub_output:
-                data['aspect'] = sub_output.split(':')[1]
-            elif 'opinion' in sub_output:
-                data['opinion'] = sub_output.split(':')[1]
-            elif 'sentiment' in sub_output:
-                data['sentiment'] = sub_output.split(':')[1]
-            elif 'polarity' in sub_output:
-                data['polarity'] = sub_output.split(':')[1]
-            elif 'category' in sub_output:
-                try:
-                    data['category'] = sub_output.split(':')[1]
-                except:
-                    data['category'] = ''
-
-    result = pd.DataFrame.from_dict(data, orient='index').T
+    raw_output = quadruple_extractor.predict(text.split('####')[0], max_length=128)
+
+    result = raw_output['Quadruples']
+    result = pd.DataFrame(result)
     return result, text
 
+
 demo = gr.Blocks()
 
 with demo:
+    with gr.Row():
+
+        if quadruple_extractor:
+            with gr.Row():
+                with gr.Column():
+                    gr.Markdown("# <p align='center'> ABSA Quadruple Extraction (Experimental) </p>")
+
+                    acos_input_sentence = gr.Textbox(
+                        placeholder="Leave this box blank and choose a dataset will give you a random example...",
+                        label="Example:",
+                    )
+                    acos_dataset_ids = gr.Radio(
+                        choices=[dataset.name for dataset in ABSAInstruction.ACOSDatasetList()],
+                        value="Laptop14",
+                        label="Datasets",
+                    )
+                    acos_inference_button = gr.Button("Let's go!")
 
+                    acos_output_text = gr.TextArea(label="Example:")
+                    acos_output_pred_df = gr.DataFrame(label="Predicted Triplets:")
 
+                    acos_inference_button.click(
+                        fn=perform_acos_inference,
+                        inputs=[acos_input_sentence, acos_dataset_ids],
+                        outputs=[acos_output_pred_df, acos_output_text],
+                    )
     with gr.Row():
         if triplet_extractor:
             with gr.Column():
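The deleted block reverse-parsed the generator's raw text output field by field; the rewrite consumes the structured `Quadruples` list directly and hands it to pandas. A hedged sketch of that last step, using an invented record with the field names the old parser extracted:

```python
import pandas as pd

# Invented record; field names follow the old parser's keys
# (aspect / category / opinion / polarity).
quadruples = [
    {"aspect": "battery", "category": "LAPTOP#GENERAL",
     "opinion": "great", "polarity": "positive"},
]
print(pd.DataFrame(quadruples))  # one row per extracted quadruple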
@@ -249,30 +260,7 @@ with demo:
                     inputs=[atepc_input_sentence, atepc_dataset_ids],
                     outputs=[atepc_output_df, atepc_output_text],
                 )
-    if quadruple_extractor:
-        with gr.Row():
-            with gr.Column():
-                gr.Markdown("# <p align='center'>Aspect Category Opinion Sentiment Extraction !</p>")
 
-                acos_input_sentence = gr.Textbox(
-                    placeholder="Leave this box blank and choose a dataset will give you a random example...",
-                    label="Example:",
-                )
-                acos_dataset_ids = gr.Radio(
-                    choices=[dataset.name for dataset in ABSAInstruction.ACOSDatasetList()],
-                    value="Restaurant16",
-                    label="Datasets",
-                )
-                acos_inference_button = gr.Button("Let's go!")
-
-                acos_output_text = gr.TextArea(label="Example:")
-                acos_output_pred_df = gr.DataFrame(label="Predicted Triplets:")
-
-                acos_inference_button.click(
-                    fn=perform_acos_inference,
-                    inputs=[acos_input_sentence, acos_dataset_ids],
-                    outputs=[acos_output_pred_df, acos_output_text],
-                )
     gr.Markdown(
         """### GitHub Repo: [PyABSA V2](https://github.com/yangheng95/PyABSA)
        ### Author: [Heng Yang](https://github.com/yangheng95) (杨恒)
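The panel deleted here is the one re-added near the top of the layout in the earlier hunk (retitled "Experimental", defaulting to Laptop14 rather than Restaurant16), so this is a move rather than a removal. For readers new to Gradio, a self-contained sketch of the same `Blocks` click wiring; the handler and component names are illustrative, not taken from the app:

```python
import gradio as gr

# Illustrative stand-in for perform_acos_inference: echoes its inputs.
def run_inference(text, dataset):
    return f"[{dataset}] {text}"

with gr.Blocks() as demo:
    sentence = gr.Textbox(label="Example:")
    dataset = gr.Radio(choices=["Laptop14", "Restaurant16"],
                       value="Laptop14", label="Datasets")
    button = gr.Button("Let's go!")
    output = gr.TextArea(label="Output:")

    # click() maps the components in `inputs` to fn's arguments and
    # routes the return value to `outputs`.
    button.click(fn=run_inference, inputs=[sentence, dataset], outputs=[output])

demo.launch()
```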
 
checkpoints.json DELETED
@@ -1 +0,0 @@
-{"2.0.0": {"APC": {"multilingual": {"id": "", "Training Model": "FAST-LSA-T-V2-Deberta", "Training Dataset": "APCDatasetList.Multilingual", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lcf_bert_Multilingual_acc_87.18_f1_83.11.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "multilingual2": {"id": "", "Training Model": "FAST-LSA-T-V2-Deberta", "Training Dataset": "APCDatasetList.Multilingual", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lcf_bert_Multilingual_acc_82.66_f1_82.06.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "english": {"id": "", "Training Model": "FAST-LSA-T-V2-Deberta", "Training Dataset": "APCDatasetList.English", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lsa_t_v2_English_acc_82.21_f1_81.81.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "chinese": {"id": "", "Training Model": "FAST-LSA-T-V2-Deberta", "Training Dataset": "APCDatasetList.Chinese", "Language": "Chinese", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lsa_t_v2_Chinese_acc_96.0_f1_95.1.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "ATEPC": {"multilingual": {"id": "", "Training Model": "FAST-LCF-ATEPC", "Training Dataset": "ABSADatasets.Multilingual", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "fast_lcf_atepc_Multilingual_cdw_apcacc_85.1_apcf1_80.2_atef1_76.45.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "multilingual-original": {"id": "", "Training Model": "FAST-LCF-ATEPC", "Training Dataset": "ABSADatasets.Multilingual", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "fast_lcf_atepc_Multilingual_cdw_apcacc_80.81_apcf1_73.75_atef1_76.01.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "multilingual2": {"id": "", "Training Model": "FAST-LCF-ATEPC", "Training Dataset": "ABSADatasets.Multilingual", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "fast_lcf_atepc_Multilingual_cdw_apcacc_78.08_apcf1_77.81_atef1_75.41.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "english": {"id": "", "Training Model": "FAST-LCF-ATEPC", "Training Dataset": "ATEPCDatasetList.English", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lcf_atepc_English_cdw_apcacc_82.36_apcf1_81.89_atef1_75.43.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "chinese": {"id": "", "Training Model": "FAST-LCF-ATEPC", "Training Dataset": "ATEPCDatasetList.Chinese", "Language": "Chinese", "Description": "Trained on RTX3090", "Available Version": "1.10.5+", "Checkpoint File": "fast_lcf_atepc_Chinese_cdw_apcacc_96.22_apcf1_95.32_atef1_78.73.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "RNAC": {"degrad_lstm": {"id": "", "Training Model": "LSTM", "Training Dataset": "ABSADatasets.Multilingual", "Language": "RNA", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "lstm_degrad_acc_85.26_f1_84.62.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "degrad_bert": {"id": "", "Training Model": "MLP", "Training Dataset": "Degrad", "Language": "RNA", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "bert_mlp_degrad_acc_87.44_f1_86.99.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "TAD": {"tad-sst2": {"id": "", "Training Model": "TAD", "Training Dataset": "SST2", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "1.15+", "Checkpoint File": "TAD-SST2.zip", "Author": "H, Yang (yangheng@m.scnu.edu.cn)"}, "tad-agnews10k": {"id": "", "Training Model": "TAD", "Training Dataset": "AGNews", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "1.15+", "Checkpoint File": "TAD-AGNews10K.zip", "Author": "H, Yang (yangheng@m.scnu.edu.cn)"}, "tad-amazon": {"id": "", "Training Model": "TAD", "Training Dataset": "AGNews", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "1.15+", "Checkpoint File": "TAD-Amazon.zip", "Author": "H, Yang (yangheng@m.scnu.edu.cn)"}}, "CDD": {"promise": {"id": "", "Training Model": "CodeT5-small", "Training Dataset": "Promise", "Language": "Code", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "bert_mlp_all_cpdp_acc_75.33_f1_73.52.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "ASTE": {"english1": {"id": "", "Training Model": "DeBERTa-v3-Base", "Training Dataset": "SemEval", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "2.1.1+", "Checkpoint File": "EMCGCN_SemEval_f1_74.01.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "english": {"id": "", "Training Model": "DeBERTa-v3-Base", "Training Dataset": "SemEval", "Language": "English", "Description": "Trained on RTX3090", "Available Version": "2.1.1+", "Checkpoint File": "ASTE-EMCGCN_SemEval_f1_74.71.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}, "multilingual": {"id": "", "Training Model": "DeBERTa-v3-Base", "Training Dataset": "SemEval + Synthetic + Chinese_Zhang datasets", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "2.1.1+", "Checkpoint File": "EMCGCN-Multilingual-f1_51.95.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "ACOS": {"multilingual": {"id": "", "Training Model": "DeBERTa-v3-Base", "Training Dataset": "SemEval + Synthetic + Chinese_Zhang datasets", "Language": "Multilingual", "Description": "Trained on RTX3090", "Available Version": "2.1.8+", "Checkpoint File": "ACOS.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}, "UPPERTASKCODE": {"promise": {"id": "", "Training Model": "CodeT5-small", "Training Dataset": "DatasetName", "Language": "", "Description": "Trained on RTX3090", "Available Version": "1.16.0+", "Checkpoint File": "lstm_degrad_acc_85.26_f1_84.62.zip", "Author": "H, Yang (hy345@exeter.ac.uk)"}}}}
 
 
checkpoints/Multilingual/ACOS/multilingual-acos.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3b7e6f53b721579e10fab9d82ff085caf051a6917dcd7d2ec9a4d00a8c44c8d0
-size 882150443
+oid sha256:550bd9d9c5c82b8e34236a3819b70fd2da3e8b1058566ca15ae1f2a4190a5486
+size 920064620
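Only the Git LFS pointer changes here; the ~920 MB archive itself lives in LFS storage, identified by its SHA-256. A hedged sketch for checking a downloaded copy against the new pointer (the local path is an assumption for illustration):

```python
import hashlib

# Hash the downloaded archive and compare with the pointer's oid above.
path = "checkpoints/Multilingual/ACOS/multilingual-acos.zip"  # assumed path

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

expected = "550bd9d9c5c82b8e34236a3819b70fd2da3e8b1058566ca15ae1f2a4190a5486"
print("OK" if h.hexdigest() == expected else "hash mismatch")
```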
emergency_notification.txt CHANGED
@@ -1,5 +1,6 @@
 
 [New Feature] Aspect Sentiment Triplet Extraction from v2.1.0 test version (https://github.com/yangheng95/PyABSA/tree/v2/examples-v2/aspect_sentiment_triplet_extration)
+[New Feature] Aspect Category Opinion Sentiment Quadruple Extraction from v2.2.0 test version (https://github.com/yangheng95/PyABSA/tree/v2/examples-v2/aspect_opinion_sentiment_category_extraction)
 
 If you find any problems, please report them on GitHub. Thanks!
 The v2.x versions are not compatible with Google Colab. Please downgrade to 1.16.27.
requirements.txt CHANGED
@@ -1 +1 @@
-pyabsa>=2.1.4
+pyabsa>=2.1.12