# -*- coding: utf-8 -*-
import gradio as gr
import requests
import os
import json

# BLOOM served via the Hugging Face Inference API
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
# HF_TOKEN = os.environ["HF_TOKEN"]
# headers = {"Authorization": f"Bearer {HF_TOKEN}"}
def translate(prompt_, from_lang, to_lang, input_prompt="translate this", seed=42):
    # Build an instruction-style prompt for BLOOM; fall back to input_prompt if no sentence was given.
    if len(prompt_) == 0:
        prompt = input_prompt
    else:
        prompt = f"Instruction : Given an {from_lang} input sentence, translate it into a {to_lang} sentence. \n input : \"{prompt_}\" \n {to_lang} : "
    json_ = {
        "inputs": prompt,
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 250,
            "return_full_text": False,  # return only the newly generated text, not the prompt
            "do_sample": False,
            "seed": seed,
            "early_stopping": False,
            "length_penalty": 0.0,
            "eos_token_id": None,
        },
        "options": {
            "use_cache": True,
            "wait_for_model": True,  # wait for the model to load instead of returning an error
        },
    }
    response = requests.post(API_URL, json=json_)  # add headers=headers when using a token
    # output = response.json()
    output = json.loads(response.content.decode("utf-8"))
    # The API returns a list like [{"generated_text": "..."}]
    output_tmp = output[0]['generated_text']
    # Keep only the first translation, cutting off any repeated "{to_lang}:" continuations.
    solution = output_tmp.split(f"\n{to_lang}:")[0]
    if '\n\n' in solution:
        final_solution = solution.split("\n\n")[0]
    else:
        final_solution = solution
    return final_solution
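
# A minimal smoke test of translate() outside the Gradio UI. This is a sketch:
# it assumes the public Inference API is reachable without a token and that the
# BLOOM model is loaded. The TRANSLATE_SMOKE_TEST environment variable is a
# hypothetical opt-in flag, not part of the original app.
if os.environ.get("TRANSLATE_SMOKE_TEST"):
    print(translate("How are you?", "English", "Hindi"))
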
demo = gr.Blocks()
with demo:
    gr.Markdown("<h1><center>Translate with Bloom</center></h1>")
    gr.Markdown('''
## Model Details
BLOOM is an autoregressive Large Language Model (LLM), trained to continue text
from a prompt on vast amounts of text data using industrial-scale computational
resources. As such, it is able to output coherent text in 46 languages and 13
programming languages that is hardly distinguishable from text written by humans.
BLOOM can also be instructed to perform text tasks it hasn't been explicitly trained
for, by casting them as text generation tasks.
## Project Details
In this project we explore the translation capabilities of BLOOM.
## How to use
At the moment this space can only translate between English, Spanish and Hindi.
The "from" language is the language of the sentence you type into the text box; the "to" language is the language you want it translated into.
Select the "from" language from the dropdown.
Select the "to" language from the dropdown.
Everyone is encouraged to improve this space by contributing.
This space was created by [Kishore](https://www.linkedin.com/in/kishore-kunisetty-925a3919a/) in order to participate in [EuroPython22](https://huggingface.co/EuroPython2022).
Please like the project to support my contribution to EuroPython22.
''')
    with gr.Row():
        from_lang = gr.Dropdown(['English', 'Spanish', 'Hindi', 'Bangla'],
                                value='English',
                                label='Select from language:')
        to_lang = gr.Dropdown(['English', 'Spanish', 'Hindi'],
                              value='Hindi',
                              label='Select to language:')

    input_prompt = gr.Textbox(label="Enter the sentence:",
                              # literal template shown as a hint (not an f-string, so the
                              # dropdown component object is not interpolated into the text)
                              value="Instruction: ... \ninput: \"from sentence\" \n{to_lang} :",
                              lines=6)
    generated_txt = gr.Textbox(lines=3)

    b1 = gr.Button("translate")
    b1.click(translate, inputs=[input_prompt, from_lang, to_lang], outputs=generated_txt)
demo.launch(enable_queue=True, debug=True)