import gradio as gr
from huggingface_hub import InferenceClient

# Client for the Mixtral instruct model served by the Hugging Face Inference API
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# System prompt: the model should answer with only the task label
system_instructions = (
    "You will be provided with text, and your task is to classify which task it asks for. "
    "The tasks are: text generation, image generation, pdf chat, image text to text, "
    "image classification, summarization, translation, tts. "
    "Answer with only the task, do not say anything else, and stop as soon as possible."
)


def classify_task(prompt):
    """Stream the model's task classification for the given prompt."""
    generate_kwargs = dict(
        temperature=0.5,
        max_new_tokens=1024,
        top_p=0.95,
        repetition_penalty=1.0,
        do_sample=True,
        seed=42,
    )
    # Mixtral-Instruct expects the [INST] ... [/INST] prompt format
    formatted_prompt = f"[INST] {system_instructions} {prompt} [/INST]"
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output


# Create the Gradio interface
with gr.Blocks() as demo:
    with gr.Row():
        text_user_input = gr.Textbox(label="Enter text 📚")
        output = gr.Textbox(label="Translation")
    with gr.Row():
        translate_btn = gr.Button("Translate 🚀")

    translate_btn.click(fn=classify_task, inputs=text_user_input,
                        outputs=output, api_name="translate_text")

# Launch the app
if __name__ == "__main__":
    demo.launch()
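# Usage sketch (assumption, not part of the original file): because the click
# handler above exposes api_name="translate_text", a running copy of this Space
# can be queried remotely with gradio_client. The Space id below is a
# hypothetical placeholder.
#
#     from gradio_client import Client
#     remote = Client("username/space-name")  # hypothetical Space id
#     result = remote.predict("generate an image of a cat", api_name="/translate_text")
#     print(result)  # expected to print the predicted task, e.g. "image generation"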