Lenylvt committed (verified)
Commit ff6eafd · Parent(s): 7d2e91b

Update app.py

Files changed (1)
  1. app.py +3 -9
app.py CHANGED
@@ -6,14 +6,8 @@ client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 def translate_text(input_text, target_language):
     prompt = f"Translate the following text into {target_language}: {input_text}"
     response = client.text_generation(prompt)
-
-    # Since the model's response includes the prompt, we extract only the translated text
-    # Assuming the translated text follows immediately after the prompt
-    translated_text = response[0]['generated_text']
-    # Clean the response to display only the translated part
-    # This might need to be adjusted based on how the model includes the prompt in its response
-    clean_translation = translated_text[len(prompt):].strip()
-    return clean_translation
+    translated_text = response['generated_text'] if 'generated_text' in response else "Translation error or model response format has changed."
+    return translated_text
 
 iface = gr.Interface(
     fn=translate_text,
@@ -26,4 +20,4 @@ iface = gr.Interface(
     description="Translate text to your specified language using the Mixtral model from Hugging Face."
 )
 
-iface.launch()
+iface.launch()
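
For context, a minimal sketch of what the full app.py might look like after this change. The imports, the Gradio input/output components, and the max_new_tokens value are assumptions (the hunks do not show them), and note that huggingface_hub's InferenceClient.text_generation() returns a plain string by default, so the dict-style 'generated_text' lookup in the committed version is handled differently in this sketch.

import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def translate_text(input_text, target_language):
    prompt = f"Translate the following text into {target_language}: {input_text}"
    # text_generation() returns a plain string by default (details=False);
    # handle both that case and a details-style object defensively.
    response = client.text_generation(prompt, max_new_tokens=512)
    if isinstance(response, str):
        return response.strip()
    return getattr(response, "generated_text", "Translation error or model response format has changed.")

# The input/output components below are illustrative assumptions, not taken
# from the diff (only fn and description appear in the shown hunks).
iface = gr.Interface(
    fn=translate_text,
    inputs=[gr.Textbox(label="Text to translate"), gr.Textbox(label="Target language")],
    outputs=gr.Textbox(label="Translation"),
    description="Translate text to your specified language using the Mixtral model from Hugging Face.",
)

iface.launch()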