# -*- coding: utf-8 -*-

import gradio as gr
import requests
import os    # only needed if HF_TOKEN is read from the environment below
import json

# BLOOM endpoint on the Hugging Face Inference API
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
# HF_TOKEN = os.environ["HF_TOKEN"]
# headers = {"Authorization": f"Bearer {HF_TOKEN}"}
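# Note: without an Authorization header the request goes through the anonymous,
# rate-limited Inference API; to authenticate, set HF_TOKEN (e.g. as a Space secret)
# and uncomment the two lines above.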

def translate(prompt_, from_lang, to_lang, input_prompt="translate this", seed=42):
  # Build the instruction prompt; fall back to the default instruction if the textbox is empty
  if len(prompt_) == 0:
    prompt = input_prompt
  else:
    prompt = f"Instruction : Given an {from_lang} input sentence translate it into {to_lang} sentence. \n input : \"{prompt_}\" \n {to_lang} : "

  json_ = {
    "inputs": prompt,
    "parameters": {
      "top_p": 0.9,
      "temperature": 1.1,
      "max_new_tokens": 250,
      "return_full_text": False,
      "do_sample": False,
      "seed": seed,
      "early_stopping": False,
      "length_penalty": 0.0,
      "eos_token_id": None,
    },
    "options": {
      "use_cache": True,
      "wait_for_model": True,
    },
  }
  response = requests.request("POST", API_URL, json=json_)  # pass headers=headers when using a token
  output = json.loads(response.content.decode("utf-8"))  # equivalent to response.json()
  output_tmp = output[0]['generated_text']
  # Keep only the text before any echoed "\n<to_lang>:" marker
  solution = output_tmp.split(f"\n{to_lang}:")[0]
  

  # The model may keep generating beyond the translation; cut at the first blank line
  if '\n\n' in solution:
    final_solution = solution.split("\n\n")[0]
  else:
    final_solution = solution
  return final_solution
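
# Usage sketch (hypothetical input; kept as a comment so the Space does not hit the
# remote API at import time):
#   translate("How are you?", "English", "Hindi")
# builds the instruction prompt above, sends it to BLOOM, and returns only the
# translated sentence.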

demo = gr.Blocks()

with demo:
  gr.Markdown("<h1><center>Translate with Bloom</center></h1>")
  gr.Markdown('''
## Model Details
BLOOM is an autoregressive Large Language Model (LLM), trained to continue text 
from a prompt on vast amounts of text data using industrial-scale computational 
resources. As such, it is able to output coherent text in 46 languages and 13 
programming languages that is hardly distinguishable from text written by humans. 
BLOOM can also be instructed to perform text tasks it hasn't been explicitly trained 
for, by casting them as text generation tasks.

## Project Details
In this project we explore the translation capabilities of BLOOM.

## How to use
At the moment this Space can only translate between English, Spanish, and Hindi.
The "From" language is the language of the sentence you type in the text box, and the "To" language is the language you want it translated into.
Select the From language from the dropdown.
Select the To language from the dropdown.

People are encouraged to improve this Space by contributing.

This Space was created by [Kishore](https://www.linkedin.com/in/kishore-kunisetty-925a3919a/) in order to participate in [EuroPython22](https://huggingface.co/EuroPython2022).
Please like the project to support my contribution to EuroPython22. 😊
''')
  with gr.Row():
    from_lang = gr.Dropdown(['English', 'Spanish', 'Hindi', 'Bangla'],
                            value='English',
                            label='Select From language:')
    to_lang = gr.Dropdown(['English', 'Spanish', 'Hindi'],
                          value='Hindi',
                          label='Select To language:')
 
  input_prompt = gr.Textbox(label="Enter the sentence:",
                            # Plain string: interpolating the Dropdown component here would
                            # render its repr, not the selected language.
                            value="Instruction: ... \ninput: \"from sentence\" \n{to_lang} :",
                            lines=6)
  
  generated_txt = gr.Textbox(lines=3)

  b1 = gr.Button("translate")
  b1.click(translate, inputs=[input_prompt, from_lang, to_lang], outputs=generated_txt)
    
demo.launch(enable_queue=True, debug=True)