import os

PATH = '/data/'  # at least 150GB storage needs to be attached
os.environ['TRANSFORMERS_CACHE'] = PATH
os.environ['HF_HOME'] = PATH
os.environ['HF_DATASETS_CACHE'] = PATH
os.environ['TORCH_HOME'] = PATH

import gradio as gr

from interfaces.cap import demo as cap_demo
from interfaces.manifesto import demo as manifesto_demo
from interfaces.sentiment import demo as sentiment_demo
from interfaces.emotion import demo as emotion_demo
from interfaces.ner import demo as ner_demo
from interfaces.ner import download_models as download_spacy_models
from utils import download_hf_models

with gr.Blocks() as demo:
    gr.Markdown(
        f"""

# Babel Machine Demo

This is a demo for text classification using language models fine-tuned on data labeled according to the CAP and Manifesto Project coding schemes, as well as sentiment and emotion annotations, and it also offers Named Entity Recognition. For the coding of complete datasets, please visit the official Babel Machine site.

Please note that the sentiment (3) and emotion (8) models were trained on parliamentary speech data, so results for generic sentences may not be reliable. Also be aware that named entity recognition is case sensitive, so the capitalization of the input matters.

""")

    gr.TabbedInterface(
        interface_list=[cap_demo, manifesto_demo, sentiment_demo, emotion_demo, ner_demo],
        tab_names=["CAP", "Manifesto", "Sentiment (3)", "Emotions (8)", "Named Entity Recognition"],
    )


if __name__ == "__main__":
    download_hf_models()
    download_spacy_models()
    demo.launch()  # TODO: add all languages & domains