|
"""Translate via Bloom.""" |
|
|
|
import os |
|
import time |
|
from textwrap import dedent |
|
|
|
import gradio as gr |
|
import httpx |
|
from logzero import logger |
|
|
|
|
|
# Pin the process timezone so log timestamps are in Shanghai time.
os.environ["TZ"] = "Asia/Shanghai"

try:

    # time.tzset() re-reads the TZ env var; it exists only on POSIX systems.
    time.tzset()

except Exception:

    # tzset is missing on Windows (AttributeError) — best-effort, just warn.
    logger.warning("Wont work in Windows...")




# Hugging Face hosted-inference endpoint for the BLOOM model.
api_url = "https://api-inference.huggingface.co/models/bigscience/bloom"

# No overall timeout (the model can queue for a long time), but fail fast
# (10 s) if the connection itself cannot be established.
timeout_ = httpx.Timeout(None, connect=10)
|
|
|
|
|
def bloom_tr(prompt_, from_lang, to_lang, input_prompt="translate this", seed=2, timeout=timeout_):
    """Translate ``prompt_`` from ``from_lang`` to ``to_lang`` via the hosted BLOOM API.

    Args:
        prompt_: Sentence to translate.
        from_lang: Source language name, e.g. "English".
        to_lang: Target language name, e.g. "Chinese".
        input_prompt: Fallback text used when ``prompt_`` is empty.
        seed: Sampling seed forwarded to the inference API.
        timeout: httpx timeout for the POST request.

    Returns:
        The translated text, or a human-readable error message string
        (errors are reported as return values so the Gradio UI can show them).
    """
    # BUG FIX: the original checked `if not prompt:` AFTER building the
    # instruction template, which is never empty — the fallback must apply
    # to the raw user input instead.
    if not str(prompt_).strip():
        prompt_ = input_prompt

    prompt = dedent(
        f"""
        Instruction : Given an {from_lang} input sentence translate it into {to_lang} sentence. \n input : {prompt_} \n {to_lang} :
        """
    ).strip()

    json_ = {
        "inputs": prompt,
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 250,
            "return_full_text": False,  # only the model's continuation, not the prompt
            "do_sample": False,
            "seed": seed,
            "early_stopping": False,
            "length_penalty": 0.0,
            "eos_token_id": None,
        },
        "options": {
            "use_cache": True,
            "wait_for_model": True,  # block until the model is loaded instead of 503
        },
    }

    try:
        response = httpx.post(api_url, json=json_, timeout=timeout)
    except Exception as exc:
        logger.error(exc)
        return str(exc)

    try:
        output = response.json()
    except Exception as exc:
        logger.error(exc)
        return f"Unable to fetch anything: {exc}"

    # Successful responses look like [{"generated_text": "..."}].
    try:
        output_tmp = output[0]["generated_text"]
    except Exception as exc:
        logger.error(exc)
        return f"Unable to retrieve result, previous output: {output}"

    # BUG FIX: the original returned output_tmp here, making all of the
    # post-processing below unreachable dead code. Keep only the text before
    # a repeated "{to_lang}:" marker the model sometimes emits.
    solution = output_tmp.split(f"\n{to_lang}:")[0]

    # The model sometimes appends unrelated continuations after a blank line;
    # keep only the first paragraph.
    if "\n\n" in solution:
        final_solution = solution.split("\n\n")[0]
    else:
        final_solution = solution

    # The translation is expected on the last line of the remaining text;
    # splitlines() is empty when the model returned nothing usable.
    try:
        return final_solution.splitlines()[-1]
    except Exception as exc:
        logger.error(exc)
        return str(exc)
|
|
|
|
|
# Extra languages offered in both dropdowns, alongside English and Chinese.
langs = "German French Italian Japanese Russian Spanish Hindi".split()
|
|
|
# --- Gradio UI --------------------------------------------------------------
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Translate with Bloom</center></h1>")
    gr.Markdown(
        dedent(
            """
            ## Model Details
            Refer to [the space](https://huggingface.co/spaces/EuroPython2022/Translate-with-Bloom) created by [Kishore](https://www.linkedin.com/in/kishore-kunisetty-925a3919a/) for participating in [EuroPython22](https://huggingface.co/EuroPython2022)
            please like his project to support his contribution to EuroPython22.
            """
        ).strip()
    )
    with gr.Row():
        # Source/target language selectors; `langs` supplies the shared extras.
        from_lang = gr.Dropdown(
            ["English", "Chinese", ] + langs,
            value="English",
            label="select From language : ",
        )
        to_lang = gr.Dropdown(
            ["Chinese", "English", ] + langs,
            value="Chinese",
            label="select to Language : ",
        )

    # BUG FIX: the original label was f"Enter a sentence in {from_lang}: ",
    # which interpolates the Dropdown *component object* (its repr), not the
    # selected language — labels are fixed at build time. Use a static label.
    input_prompt = gr.Textbox(
        label="Enter a sentence in the selected From language: ",
        value="This is a test, yet another test.",
        lines=4,
    )

    # Output box for the translation returned by bloom_tr.
    generated_txt = gr.Textbox(lines=4)

    b1 = gr.Button("translate")
    b1.click(
        bloom_tr,
        inputs=[input_prompt, from_lang, to_lang],
        outputs=generated_txt,
    )

# NOTE(review): enable_queue= was removed in gradio 4.x (queueing is on by
# default there); kept as-is since this file targets the older API.
demo.launch(enable_queue=True, debug=True)
|
|