"""Translate via Bloom."""
# pylint: disable=invalid-name
import os
import time
from textwrap import dedent

import gradio as gr
import httpx
from logzero import logger

# os.environ.setdefault("TZ", "Asia/Shanghai")
os.environ["TZ"] = "Asia/Shanghai"
try:
    time.tzset()
except Exception:
    logger.warning("Wont work in Windows...")  # Windows wont do

# Bloom
api_url = "https://api-inference.huggingface.co/models/bigscience/bloom"
timeout_ = httpx.Timeout(None, connect=10)
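
# A minimal sketch of the round trip this script performs (assumption: the hosted
# Inference API accepts a JSON body of the form
# {"inputs": "...", "parameters": {...}, "options": {...}} and answers with a
# list like [{"generated_text": "..."}]), e.g.:
#
#     resp = httpx.post(api_url, json={"inputs": "Hello"}, timeout=timeout_)
#     resp.json()  # -> [{'generated_text': 'Hello ...'}]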


def bloom_tr(prompt_, from_lang, to_lang, input_prompt="translate this", seed=2, timeout=timeout_):
    """Translate via Bloom."""
    prompt = dedent(
        f"""
        Instruction : Given an {from_lang} input sentence, translate it into a {to_lang} sentence. \n input : {prompt_} \n {to_lang} :
        """
    ).strip()

    if not prompt:
        prompt = input_prompt

    json_ = {
        "inputs": prompt,
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 250,
            "return_full_text": False,
            "do_sample": False,
            "seed": seed,
            "early_stopping": False,
            "length_penalty": 0.0,
            "eos_token_id": None,
        },
        "options": {
            "use_cache": True,
            "wait_for_model": True,
        },
    }
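    # Note on the parameters above: with "do_sample": False the model decodes
    # greedily, so "top_p", "temperature" and "seed" have no practical effect
    # (at least with the standard transformers generation config); set
    # "do_sample": True to get sampled, non-deterministic translations.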

    # POST the payload to the hosted Inference API.
    try:
        response = httpx.post(api_url, json=json_, timeout=timeout)
    except Exception as exc:
        logger.error(exc)
        return str(exc)

    # The API normally answers with a list like [{"generated_text": "..."}].
    try:
        output = response.json()
    except Exception as exc:
        logger.error(exc)
        return f"Unable to fetch anything: {exc}"

    try:
        output_tmp = output[0]["generated_text"]
    except Exception as exc:
        logger.error(exc)
        return f"Unable to retrieve result, previous output: {output}"

    # Keep only the text before any follow-on "{to_lang}:" marker the model emits.
    solution = output_tmp.split(f"\n{to_lang}:")[0]

    # The model sometimes keeps generating unrelated text after a blank line; drop it.
    if "\n\n" in solution:
        final_solution = solution.split("\n\n")[0]
    else:
        final_solution = solution

    # The translation is expected on the last line of what remains.
    try:
        translation = final_solution.splitlines()[-1]
    except Exception as exc:
        logger.error(exc)
        return str(exc)

    return translation
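

# Quick smoke test for bloom_tr (a sketch only; it performs a real HTTP call to
# the Inference API, so run it manually rather than leaving it enabled here):
#
#     print(bloom_tr("How are you today?", "English", "Chinese"))
#     # expected: a single line holding the Chinese translation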


langs = [
    "German",
    "French",
    "Italian",
    "Japanese",
    "Russian",
    "Spanish",
    "Hindi",
]

demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Translate with Bloom</center></h1>")
    gr.Markdown(
        dedent(
            """
            ## Model Details
            Refer to [the space](https://huggingface.co/spaces/EuroPython2022/Translate-with-Bloom) created by [Kishore](https://www.linkedin.com/in/kishore-kunisetty-925a3919a/) for [EuroPython22](https://huggingface.co/EuroPython2022).
            Please like his project to support his contribution to EuroPython22.
            """
        ).strip()
    )
    with gr.Row():
        from_lang = gr.Dropdown(
            ["English", "Chinese", ] + langs,
            value="English",
            label="select From language : ",
        )
        to_lang = gr.Dropdown(
            ["Chinese", "English", ] + langs,
            value="Chinese",
            label="select to Language : ",
        )

        input_prompt = gr.Textbox(
            label=f"Enter a sentence in {from_lang}: ",
            value="This is a test, yet another test.",
            lines=4,
        )

    generated_txt = gr.Textbox(lines=4)

    b1 = gr.Button("Translate")
    b1.click(
        bloom_tr,
        bloom_tr,
        inputs=[input_prompt, from_lang, to_lang],
        outputs=generated_txt,
    )

demo.launch(enable_queue=True, debug=True)