HeshamHaroon committed
Commit cbc5d9e
Parent: 4195e71

Update app.py

Files changed (1)
  1. app.py +13 -36
app.py CHANGED
@@ -2,17 +2,16 @@ from huggingface_hub import InferenceClient
 import gradio as gr
 from deep_translator import GoogleTranslator
 
-# Initialize the Hugging Face client with your model
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-# Define the translation functions
 def translate_to_arabic(text):
-    return GoogleTranslator(source='auto', target='ar').translate(text)
+    translator = GoogleTranslator(source='auto', target='ar')
+    return translator.translate(text)
 
 def translate_to_english(text):
-    return GoogleTranslator(source='auto', target='en').translate(text)
+    translator = GoogleTranslator(source='auto', target='en')
+    return translator.translate(text)
 
-# Format the prompt for the model
 def format_prompt(message, history):
     prompt = "<s>"
     for user_prompt, bot_response in history:
@@ -21,50 +20,28 @@ def format_prompt(message, history):
     prompt += f"[INST] {message} [/INST]"
     return prompt
 
-# Generate a response from the model
-def generate(prompt, history=[], temperature=0.1, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
-    # Translate the Arabic prompt to English before sending to the model
+def generate(prompt, history=[]):
+    # Translate the Arabic prompt to English
     prompt_in_english = translate_to_english(prompt)
+    # Call the format_prompt function to format the input for the model
     formatted_prompt = format_prompt(prompt_in_english, history)
-
-    generate_kwargs = {
-        "temperature": temperature,
-        "max_new_tokens": max_new_tokens,
-        "top_p": top_p,
-        "repetition_penalty": repetition_penalty,
-        "do_sample": True,
-        "seed": 42,  # Seed for reproducibility, remove or change if randomness is preferred
-    }
-
-    # Generate the response
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+
+    # Generate the response from the model
+    stream = client.text_generation(formatted_prompt, stream=True, details=True, return_full_text=False)
 
     output = ""
     for response in stream:
         output += response["token"]["text"]
-
+
     # Translate the English response back to Arabic
     response_in_arabic = translate_to_arabic(output)
     return response_in_arabic
 
-# Define additional inputs for Gradio interface
-additional_inputs = [
-    gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05),
-    gr.Slider(label="Max new tokens", value=256, minimum=0, maximum=1048, step=64),
-    gr.Slider(label="Top-p (nucleus sampling)", value=0.90, minimum=0.0, maximum=1.0, step=0.05),
-    gr.Slider(label="Repetition penalty", value=1.2, minimum=1.0, maximum=2.0, step=0.05)
-]
-
-# Set up the Gradio interface
 iface = gr.Interface(
     fn=generate,
-    inputs=[
-        gr.Textbox(lines=5, placeholder='Type your Arabic query here...', label='Arabic Query'),
-        *additional_inputs
-    ],
+    inputs=[gr.Textbox(lines=5, placeholder='Type your Arabic query here...', label='Arabic Query')],
     outputs='text',
-    title="DorjGPT Arabic-English Translation Chatbot",
+    title="DorjGPT Arabic-English Translation Chatbot"
 )
 
-# Launch the Gradio interface
 iface.launch()
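The body of the "for user_prompt, bot_response in history:" loop in format_prompt falls between the two hunks and is not shown in this diff. For readers following along, a common way to build Mixtral-style [INST] chat prompts is sketched below; the loop body here is an assumption for illustration only, not code taken from this commit.

def format_prompt(message, history):
    # Sketch of a typical Mixtral [INST] prompt builder.
    # The loop body is assumed; the diff above only shows the lines around it.
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt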
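This commit also removes the explicit sampling configuration from generate(), so text_generation now runs with the endpoint's defaults. If that control is wanted again, InferenceClient.text_generation accepts the same options directly as keyword arguments. A minimal sketch, reusing the defaults from the previous revision of app.py and not part of this commit:

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# Hypothetical prompt; in app.py this would come from format_prompt(...).
formatted_prompt = "<s>[INST] Hello [/INST]"

# Sketch only: re-adds the sampling controls that this commit removed,
# passed directly as keyword arguments (values from the old generate() defaults).
stream = client.text_generation(
    formatted_prompt,
    temperature=0.1,
    max_new_tokens=256,
    top_p=0.95,
    repetition_penalty=1.0,
    do_sample=True,
    seed=42,
    stream=True,
    details=True,
    return_full_text=False,
)

# Consume the stream the same way app.py does.
output = ""
for response in stream:
    output += response["token"]["text"]

Without the fixed seed=42, repeated calls are no longer reproducible, which matches the old inline comment's note that the seed could be dropped if randomness is preferred.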