Hugging Face Space (status at time of capture: runtime error).
"""Gradio demo Space for the microsoft/Phi-3-mini-128k-instruct model.

Two tabs: a static model-information page and a minimal text-generation UI.
"""
import gradio as gr
from transformers import pipeline


def main():
    """Build the Gradio Blocks app and launch it."""
    # Load the model using Hugging Face's transformers pipeline.
    # NOTE(review): trust_remote_code=True executes Python shipped inside the
    # model repository — acceptable for this official Microsoft checkpoint,
    # but never do this for untrusted model ids.
    model = pipeline(
        "text-generation",
        model="microsoft/Phi-3-mini-128k-instruct",
        trust_remote_code=True,
    )

    def generate_text(input_text):
        """Generate a continuation of *input_text* and return the full text."""
        # Use max_new_tokens instead of max_length: max_length=50 counted the
        # prompt tokens as well, so any prompt near 50 tokens would truncate
        # the generation (or warn/error). max_new_tokens bounds only the
        # newly generated tokens.
        result = model(input_text, max_new_tokens=50)
        return result[0]['generated_text']

    with gr.Blocks() as blocks:
        with gr.Tab("Model Info"):
            gr.Markdown("""
# Microsoft/Phi-3-mini-128k-instruct
## Description
This model has 3.8 billion parameters, designed for lightweight and cutting-edge applications. Trained on the Phi-3 dataset, it emphasizes quality and dense reasoning suitable for various applications.
### Main Use Cases
- Ideal for environments with memory or processing limitations.
- Suitable for scenarios where latency is critical.
- Useful for solid reasoning needs, including code, mathematics, and logic.
### License and Use
The information provided should not be seen as a modification of the licensing under which the model is released.
[More information](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct)
""")
        with gr.Tab("Generate Text"):
            input_text = gr.Textbox(placeholder="Type here to generate text")
            output_text = gr.Textbox(label="Generated Text")
            # Pressing Enter in the input box triggers generation.
            input_text.submit(fn=generate_text, inputs=input_text, outputs=output_text)

    blocks.launch()


if __name__ == "__main__":
    main()