"""MARS Demo: off-the-shelf zero-shot text classification behind a Gradio UI.

The app classifies free text against user-supplied candidate labels
(separated by ';;') using a zero-shot NLI pipeline. Language detection via
fastText is implemented but currently disabled (English-only routing).
"""
import datetime

import gradio as gr
from huggingface_hub import hf_hub_download
from langdetect import detect, DetectorFactory, detect_langs
import fasttext
from transformers import pipeline

# Zero-shot NLI model per language code. Only English is enabled; the other
# entries are kept for easy re-enabling.
models = {
    'en': 'Narsil/deberta-large-mnli-zero-cls',  # English
    # 'es': 'Recognai/zeroshot_selectra_medium',        # Spanish
    # 'it': 'joeddav/xlm-roberta-large-xnli',           # Italian
    # 'ru': 'DeepPavlov/xlm-roberta-large-en-ru-mnli',  # Russian
    # 'tr': 'vicgalle/xlm-roberta-large-xnli-anli',     # Turkish
    # 'no': 'NbAiLab/nb-bert-base-mnli',                # Norsk
    # 'de': 'Sahajtomar/German_Zeroshot',               # German
}

# Hypothesis template handed to the zero-shot pipeline for each language.
hypothesis_templates = {
    'en': 'This example is {}.',  # English
    # 'es': 'Este ejemplo es {}.',        # Spanish
    # 'it': 'Questo esempio è {}.',       # Italian
    # 'ru': 'Этот пример {}.',            # Russian
    # 'tr': 'Bu örnek {}.',               # Turkish
    # 'no': 'Dette eksempelet er {}.',    # Norsk
    # 'de': 'Dieses beispiel ist {}.',    # German
}

# One zero-shot classification pipeline per enabled language (replaces the
# original copy-pasted per-language entries; contents are identical since
# only 'en' is enabled in `models`).
classifiers = {
    lang: pipeline("zero-shot-classification",
                   hypothesis_template=hypothesis_templates[lang],
                   model=models[lang])
    for lang in models
}

# fastText language-identification model (176 languages).
fasttext_model = fasttext.load_model(
    hf_hub_download("julien-c/fasttext-language-id", "lid.176.bin"))


def prep_examples():
    """Return the example [text, labels] pairs shown in the Gradio UI.

    Each entry is a two-element list: the input text and a single string of
    candidate labels separated by ';;' (the format sequence_to_classify
    expects).
    """
    # Shared label sets, deduplicated from the original per-example copies.
    product_labels = "Taste;;Smell;;Delivery;;Packaging;;Availability"
    availability_labels = ("This product has availability issues.;;"
                           "Stores do not stock this product.;;"
                           "I live in a remote place.")
    tox_labels_short = ("Metabolism/metabolites;;Exposure;;"
                        "Repeated dose toxicity;;Single dose toxicity;;"
                        "distribution and excretion;;Absorption")
    tox_labels_full = ("Metabolism/metabolites;;Exposure;;"
                       "Repeated dose toxicity;;Single dose toxicity;;"
                       "Carcinogenicity;;Cytotoxicity;;"
                       "distribution and excretion;;Absorption")

    abstract_nicotine_metabolism = (
        "RATIONALE: The ability of nicotine to suppress body weight is cited "
        "as a factor impacting smoking initiation and the failure to quit. "
        "Self-administered nicotine in male rats suppresses weight "
        "independent of food intake, suggesting that nicotine increases "
        "energy expenditure. OBJECTIVE: The current experiment evaluated the "
        "impact of self-administered nicotine on metabolism in rats using "
        "indirect calorimetry and body composition analysis. METHODS: Adult "
        "male rats with ad libitum access to powdered standard rodent chow "
        "self-administered intravenous infusions of nicotine (60 "
        "μg/kg/infusion or saline control) in daily 1-h sessions in the last "
        "hour of the light cycle. Indirect calorimetry measured respiratory "
        "exchange ratio (RER), energy expenditure, motor activity, and food "
        "and water consumption for 22.5 h between select self-administration "
        "sessions. RESULTS: Self-administered nicotine suppressed weight "
        "gain and reduced the percent of body fat without altering the "
        "percent of lean mass, as measured by Echo MRI. Nicotine reduced "
        "RER, indicating increased fat utilization; this effect was observed "
        "prior to weight suppression. Moreover, nicotine intake did not "
        "affect motor activity or energy expenditure. Daily food intake was "
        "not altered by nicotine self-administration; however, a trend in "
        "suppression of meal size, a transient suppression of water intake, "
        "and an increase in meal frequency was observed. CONCLUSION: These "
        "data provide evidence that self-administered nicotine suppresses "
        "body weight via increased fat metabolism, independent of "
        "significant changes in feeding, activity, or energy expenditure. "
        "(Rupprecht et al. 2018)")

    abstract_nicotine_carcinogenicity = (
        "BACKGROUND AND PURPOSE: Tobacco smoke contains many classes of "
        "carcinogens and although nicotine is unable to initiate "
        "tumourigenesis in humans and rodents, it promotes tumour growth "
        "and metastasis in lung tumours by acting on neuronal nicotinic ACh "
        "receptors (nAChRs). The aim of this study was to identify "
        "molecularly, biochemically and pharmacologically which nAChR "
        "subtypes are expressed and functionally activated by nicotine in "
        "lung cancer cell lines. EXPERIMENTAL APPROACH: We used A549 and "
        "H1975 adenocarcinoma cell lines derived from lung tumours to test "
        "the in vitro effects of nicotine, and nAChR subtype-specific "
        "peptides and compounds. KEY RESULTS: The two adenocarcinoma cell "
        "lines express distinctive nAChR subtypes, and this affects their "
        "nicotine-induced proliferation. In A549 cells, nAChRs containing "
        "the α7 or α9 subunits not only regulate nicotine-induced cell "
        "proliferation but also the activation of the Akt and ERK pathways. "
        "Blocking these nAChRs by means of subtype-specific peptides, or "
        "silencing their expression by means of subunit-specific siRNAs, "
        "abolishes nicotine-induced proliferation and signalling. Moreover, "
        "we found that the α7 antagonist MG624 also acts on α9-α10 nAChRs, "
        "blocks the effects of nicotine on A549 cells and has "
        "dose-dependent cytotoxic activity. CONCLUSIONS AND IMPLICATIONS: "
        "These results highlight the pathophysiological role of α7- and "
        "α9-containing receptors in promoting non-small cell lung carcinoma "
        "cell growth and intracellular signalling and provide a framework "
        "for the development of new drugs that specifically target the "
        "receptors expressed in lung tumours. (Mucchietto et al. 2018).")

    abstract_nicotine_cytotoxicity = (
        "It is well-documented that nicotine, the main active ingredient in "
        "cigarettes, results in endothelial cell injury in numerous "
        "diseases. However, whether nicotine plays a crucial role in "
        "endothelial cell injury in diabetes and the exact molecular "
        "mechanism that mediates this process have not been fully "
        "elucidated. The current study aimed to investigate the effects of "
        "nicotine on endothelial cell injury in diabetes and the specific "
        "molecular mechanism by which it plays a role. Human umbilical vein "
        "endothelial cells (HUVECs) were incubated in HG/HF media and "
        "treated with nicotine, PYR-41 (a selective ubiquitin E1 "
        "inhibitor), Akt-overexpressing adenovirus, or TTC3 and MUL1 shRNA "
        "adenovirus. Cell viability was subsequently detected by the CCK8 "
        "assay, and apoptosis was examined by caspase-3 cleavage and "
        "activity analysis. Compared to the HG/HF incubated group, nicotine "
        "incubation significantly decreased cell survival and increased "
        "apoptosis. Moreover, nicotine induced Akt degradation via UPS, and "
        "Akt overexpression blocked nicotine-induced apoptosis in HUVECs "
        "cultured in HG/HF media. Furthermore, the TTC3 and MUL1 shRNA "
        "adenovirus dramatically decreased the Akt ubiquitination and "
        "apoptosis induced by nicotine. These results indicate that "
        "nicotine-induced Akt ubiquitination and degradation occurs through "
        "TTC3 and MUL1 and results in a dramatic increase in apoptosis in "
        "HUVECs cultured in HG/HF media. (Cao et al. 2018).")

    examples = [
        ["The tangy feel in my mouth was a different experience altogether.",
         product_labels],
        ["The aroma was not quite my type.", product_labels],
        ["The chocolates came in an attractive orange box.", product_labels],
        ["I can barely find this product in any store near where I stay.",
         availability_labels],
        ["I can barely find the products I love in the stores anywhere in "
         "this deserted place where I moved in few days back.",
         availability_labels],
        [abstract_nicotine_metabolism, tox_labels_short],
        [abstract_nicotine_carcinogenicity, tox_labels_full],
        [abstract_nicotine_cytotoxicity, tox_labels_full],
        ["Elephants are", "big;;small;;strong;;fast;;carnivorous"],
        ["Do dogs really make better pets than cats or hamsters?",
         "kittens;;hamsters;;cats;;dogs"],
        ["He is hungry.", "Positive;;Negative"],
        ["He is hungry to learn.", "Positive;;Negative"],
        ["Gogh was playing with his brush.", "Painting;;Amusement;;Sport"],
        ["He was playing with his brush.", "Painting;;Amusement;;Sport"],
        ["Googh was sitting by the window. He was playing with his brush.",
         "Painting;;Amusement;;Sport"],
        ["Microsoft has turned itself around.",
         "Under the leadership of Nadella;;Despite the leadership of "
         "Nadella;;And Nadella had nothing to do with it;;Under the "
         "leadership of Pichai"],
    ]
    return examples


def detect_lang(sequence, labels):
    """Detect the language of *sequence* (and *labels*) with fastText.

    Logs a warning when the two detections disagree. Returns the sequence's
    language code if a classifier is configured for it; otherwise falls back
    to 'en'.
    """
    DetectorFactory.seed = 0  # make the legacy langdetect path deterministic
    seq_lang = 'en'
    lbl_lang = 'en'  # FIX: define up front so the comparison below cannot
                     # NameError when prediction fails before assignment.
    # fastText's predict() rejects embedded newlines, so flatten both inputs.
    # FIX: the original sanitized only `sequence`, not `labels`.
    sequence = sequence.replace('\n', ' ')
    labels = labels.replace('\n', ' ')
    try:
        # Legacy langdetect alternative, kept for reference:
        # seq_lang = detect(sequence)
        # lbl_lang = detect(labels)
        seq_lang = fasttext_model.predict(
            sequence, k=1)[0][0].split("__label__")[1]
        lbl_lang = fasttext_model.predict(
            labels, k=1)[0][0].split("__label__")[1]
    except Exception:  # FIX: was a bare `except:`
        # FIX: the original passed only two arguments to a three-placeholder
        # format string, so the handler itself raised IndexError.
        print("Language detection failed!",
              "Date:{}, Sequence:{}, Labels:{}".format(
                  str(datetime.datetime.now()), sequence, labels))
    if seq_lang != lbl_lang:
        print("Different languages detected for sequence and labels!",
              "Date:{}, Sequence:{}, Labels:{}, Sequence Language:{}, Label Language:{}".format(
                  str(datetime.datetime.now()), sequence, labels, seq_lang, lbl_lang))
    if seq_lang in models:
        print("Sequence Language detected.",
              "Date:{}, Sequence:{}, Sequence Language:{}".format(
                  str(datetime.datetime.now()), sequence, seq_lang))
    else:
        print("Language not supported. Defaulting to English!",
              "Date:{}, Sequence:{}, Sequence Language:{}".format(
                  str(datetime.datetime.now()), sequence, seq_lang))
        seq_lang = 'en'
    return seq_lang


def sequence_to_classify(sequence, labels):
    """Classify *sequence* against ';;'-separated candidate *labels*.

    Returns a dict mapping each candidate label to its score (suitable for
    gr.outputs.Label). Scores come from the single-label (softmax) zero-shot
    pipeline, so they sum to 1.
    """
    lang = 'en'  # detect_lang(sequence, labels) — multilingual routing disabled
    classifier = classifiers[lang]
    candidate_labels = str(labels).split(";;")
    response = classifier(sequence, candidate_labels, multi_label=False)
    predicted_labels = response['labels']
    # Pair labels with scores directly (the original mutated the score list
    # with .pop(0) inside a comprehension; this is equivalent and clearer).
    clean_output = {label: float(score)
                    for label, score in zip(predicted_labels,
                                            response['scores'])}
    print("Date:{}, Sequence:{}, Labels: {}".format(
        str(datetime.datetime.now()), sequence, predicted_labels))
    return clean_output


# NOTE(review): gr.inputs / gr.outputs is the Gradio 2.x API; migrating to
# gr.Textbox / gr.Label is required for Gradio 3+ — confirm the pinned
# gradio version before changing.
iface = gr.Interface(
    title="MARS Demo",
    description="Off-the-shelf NLP classifier with no domain or task-specific training.",
    fn=sequence_to_classify,
    inputs=[gr.inputs.Textbox(
                lines=10,
                label="Please enter the text you would like to classify...",
                placeholder="Text here..."),
            gr.inputs.Textbox(
                lines=2,
                label="Please enter the candidate labels (separated by 2 consecutive semicolons)...",
                placeholder="Labels here separated by ;;")],
    outputs=gr.outputs.Label(num_top_classes=1),
    # interpretation="default",
    examples=prep_examples())

iface.launch()