#!/usr/bin/python
# 模型加载。从网上下载到本地，并进行缓存，然后加载。如果缓存，直接加载。
# 2022-12-9
import torch
from modelscope.utils.constant import Tasks
from modelscope.pipelines import pipeline as cgec_pipeline
from transformers import pipeline as tfms_pipeline
from happytransformer import HappyTextToText, TTSettings
# from transformers import BartForConditionalGeneration
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# from transformers import BartForConditionalGeneration
import synonyms
from transformers import AutoModelForQuestionAnswering
from ctc_score import SummarizationScorer
import stanza

# Pick the compute device: use CUDA when a GPU is available, otherwise CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# cgec: Chinese grammatical-error-correction model (ModelScope BART pipeline).
# https://www.modelscope.cn/models/damo/nlp_bart_text-error-correction_chinese/summary
cgec_question = "遇到逆竟时，我们必须勇于面对，而且要愈挫愈勇，这样我们才能朝著成功之路前进。"
cgec_pos = 'damo/nlp_bart_text-error-correction_chinese'
cgec_model = cgec_pipeline(Tasks.text_error_correction, model=cgec_pos, device=device)

# egec: English grammatical-error-correction model (T5 via happytransformer).
# https://huggingface.co/vennify/t5-base-grammar-correction
egec_question = "This sentences has has bads grammar."
egec_model = HappyTextToText("T5", "vennify/t5-base-grammar-correction")
# Generation settings shared by the grammar-correction calls.
args = TTSettings(num_beams=5, min_length=1)

# # eaqg 英文有答案问题生成
# # https://huggingface.co/lmqg/bart-base-squad
# eaqg_model = tfms_pipeline("text2text-generation", 'lmqg/bart-base-squad')
# eaqg_text = "<hl> Beyonce <hl> further expanded her acting career, starring as blues singer Etta James in the 2008 musical biopic, Cadillac Records."

# # caqg 中文有答案问题生成
# # https://huggingface.co/IDEA-CCNL/Randeng-BART-139M-QG-Chinese
# caqg_tokenizer = AutoTokenizer.from_pretrained("IDEA-CCNL/Randeng-BART-139M-QG-Chinese",additional_special_tokens=["<ans>"])
# caqg_model = BartForConditionalGeneration.from_pretrained("IDEA-CCNL/Randeng-BART-139M-QG-Chinese")
# caqg_text = "知识：1939年9月1日德国入侵波兰后，第二次世界大战开始，华沙一直被保卫到9月27日。波兰中部，包括华沙，都在德国纳粹殖民地政府总政府的统治下。所有的高" \
#             "等教育机构都立即关闭，华沙的犹太人口——几十万，约占城市的 <ans> ——全部涌入华沙的贫民区。回答：30%."

# cnaqg: Chinese answer-agnostic question generation (mT5).
# https://huggingface.co/algolet/mt5-base-chinese-qg
cnaqg_text = (
    "在一个寒冷的冬天，赶集完回家的农夫在路边发现了一条冻僵了的蛇。他很可怜蛇，就把它放在怀里。当他身上的热气把蛇温暖以后，蛇很快苏醒了，露出了残忍的本性"
    "，给了农夫致命的伤害——咬了农夫一口。农夫临死之前说：“我竟然救了一条可怜的毒蛇，就应该受到这种报应啊！”"
)
cnaqg_name = "algolet/mt5-base-chinese-qg"
cnaqg_model = tfms_pipeline('text2text-generation', model=cnaqg_name, tokenizer=cnaqg_name)

# enaqg: English answer-agnostic question generation (mT5).
# https://huggingface.co/nbroad/mt5-base-qgen
enaqg_text = (
    "Hugging Face has seen rapid growth in its popularity since the get-go. It is definitely doing the right things to attract more"
    " and more people to its platform, some of which are on the following lines:Community driven approach through large open source"
    " repositories along with paid services. Helps to build a network of like-minded people passionate about open source.  Attractive"
    " price point. The subscription-based features, e.g.: Inference based API, starts at a price of $9/month."
)
enaqg_tokenizer = AutoTokenizer.from_pretrained("nbroad/mt5-base-qgen")
enaqg_model = AutoModelForSeq2SeqLM.from_pretrained("nbroad/mt5-base-qgen")

# edg: English distractor generation for multiple-choice questions (BART).
# https://huggingface.co/voidful/bart-distractor-generation-both
edg_question = "The passage mainly tells us"
edg_answer = "treehouses in france."
edg_context = (
    "When you ' re having a holiday , one of the main questions to ask is which hotel or apartment"
    " to choose . However , when it comes to France , you have another special choice : treehouses"
    " . In France , treehouses are offered to travelers as a new choice in many places . The price may"
    " be a little higher , but you do have a chance to _ your childhood memories . Alain Laurens , one"
    " of France ' s top treehouse designers , said , ' Most of the people might have the experience of"
    " building a den when they were young . And they like that feeling of freedom when they are children"
    " . ' Its fairy - tale style gives travelers a special feeling . It seems as if they are living as a"
    " forest king and enjoying the fresh air in the morning . Another kind of treehouse is the ' star cube"
    " ' . It gives travelers the chance of looking at the stars shining in the sky when they are going to sleep"
    " . Each ' star cube ' not only offers all the comfortable things that a hotel provides for travelers , but"
    " also gives them a chance to look for stars by using a telescope . The glass roof allows you to look at the"
    " stars from your bed ."
)
edg_name = "voidful/bart-distractor-generation-both"
edg_model = tfms_pipeline('text2text-generation', model=edg_name)

# cdg: Chinese distractor generation via the Synonyms toolkit (module alias).
# https://github.com/chatopera/Synonyms
cdg_answer = "人脸"
cdg_model = synonyms

# eqa: English extractive QA, used to evaluate question answerability.
# https://huggingface.co/mfeb/albert-xxlarge-v2-squad2
eqa_question = "Which name is also used to describe the Amazon rainforest in English?"
eqa_context = (
    "The Amazon rainforest (Portuguese: "
    "Floresta Amazônica or Amazônia; Spanish: Selva Amazónica"
    ", Amazonía or usually Amazonia; French: Forêt amazonienne;"
    " Dutch: Amazoneregenwoud), also known in English as Amazonia"
    " or the Amazon Jungle, is a moist broadleaf forest that covers"
    " most of the Amazon basin of South America. This basin"
    " encompasses 7,000,000 square kilometres (2,700,000 sq mi)"
    ", of which 5,500,000 square kilometres (2,100,000 sq mi)"
    " are covered by the rainforest. This region includes territory"
    " belonging to nine nations. The majority of the forest is contained"
    " within Brazil, with 60% of the rainforest, followed by Peru with"
    " 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador"
    ", Bolivia, Guyana, Suriname and French Guiana. States or departments"
    " in four nations contain 'Amazonas' in their names. The"
    " Amazon represents over half of the planet's remaining"
    " rainforests, and comprises the largest and most biodiverse"
    " tract of tropical rainforest in the world, with an estimated "
    "390 billion individual trees divided into 16,000 species."
)
eqa_name = "mfeb/albert-xxlarge-v2-squad2"
eqa_model = tfms_pipeline('question-answering', model=eqa_name)

# cqa: Chinese extractive QA, used to evaluate question answerability.
# https://huggingface.co/uer/roberta-base-chinese-extractive-qa
cqa_question = "著名诗歌《假如生活欺骗了你》的作者是谁？"
cqa_context = "普希金从那里学习人民的语言，吸取了许多有益的养料，这一切对普希金后来的创作产生了很大的影响。这两年里，普希金创作了不少优秀的作品，如《囚徒》、《致大海》、《致凯恩》和《假如生活欺骗了你》等几十首抒情诗，叙事诗《努林伯爵》，历史剧《鲍里斯·戈都诺夫》，以及《叶甫盖尼·奥涅金》前六章。"
cqa_tokenizer = AutoTokenizer.from_pretrained('uer/roberta-base-chinese-extractive-qa')
cqa_data = AutoModelForQuestionAnswering.from_pretrained('uer/roberta-base-chinese-extractive-qa')
cqa_model = tfms_pipeline('question-answering', model=cqa_data, tokenizer=cqa_tokenizer)

# Consistency / relevance evaluation via the CTC generation-evaluation framework.
# https://github.com/tanyuqian/ctc-gen-eval
scorer = SummarizationScorer(align='E-bert', device=device)
# English evaluation fixture.
en_eval_context = "Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24 to earn their third Super Bowl title. The game was played on February 7, 2016, at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California. As this was the 50th Super Bowl, the league emphasized the \"golden anniversary\" with various gold-themed initiatives, as well as temporarily suspending the tradition of naming each Super Bowl game with Roman numerals (under which the game would have been known as \"Super Bowl L\"), so that the logo could prominently feature the Arabic numerals 50."
en_eval_question = "Which NFL team represented the AFC at Super Bowl 50"
en_eval_ref = "Which NFL team represented the AFC at Super Bowl 50"
# Chinese evaluation fixture.
ch_eval_context = "如果把计算机比做人，那么CPU就是人的大脑，CPU主要功能是对系统操作指令进行算数和逻辑运算。"
ch_eval_question = "CPU的主要功能是什么"
ch_eval_ref = "CPU主要功能是什么"

# ner: named-entity recognition for Chinese and English (Stanza pipelines).
# https://stanfordnlp.github.io/stanza/
ch_ner_text = "欢迎来到的中国，你要去哪个城市？"
ch_ner_model = stanza.Pipeline('zh', processors={'ner': 'tokenize'}, use_gpu=False, download_method=None)
en_ner_text = "Barack Obama was born in Hawaii.  He was elected president in 2008."
# Default neural pipeline for English, restricted to NER on CPU.
en_ner_model = stanza.Pipeline('en', processors={'ner': 'tokenize'}, use_gpu=False, download_method=None)
