from flask import Flask, request, jsonify
from transformers import pipeline
# Initialize Flask app
app = Flask(__name__)
# Load the Arabic-QwQ model once at import time (Hugging Face pipeline for simplicity).
# NOTE(review): this downloads/loads a 32B-parameter model eagerly — import will be
# slow and memory-heavy; presumably intended for a GPU-backed demo host. Confirm the
# deployment target can hold the model, or consider lazy loading on first request.
model_pipeline = pipeline(
"text-generation",
model="Omartificial-Intelligence-Space/Arabic-QWQ-32B-Preview"
)
@app.route('/')
def index():
    """Root endpoint; returns a minimal landing page body.

    Serves a plain-text banner; could be replaced with an HTML form later.
    """
    banner = """
Arabic-QwQ Model Demo
"""
    return banner
@app.route('/predict', methods=["POST"])
def predict():
    """Run Arabic-QwQ inference on a user-supplied prompt.

    Expects a POST with form field ``prompt``. Returns JSON with the
    original input and the model's generated text.

    Responses:
        200 -- {"input": ..., "response": ...} on success.
        400 -- missing or blank ``prompt`` (client error, not a server fault).
        500 -- any inference failure, with the error message.
    """
    user_input = request.form.get("prompt")
    # Validate up front: previously a missing prompt reached the pipeline as
    # None and surfaced as an opaque 500 instead of a 400 client error.
    if not user_input or not user_input.strip():
        return jsonify({"error": "Missing or empty 'prompt' form field"}), 400
    try:
        # Keep the try body minimal: only the call that can realistically fail.
        output = model_pipeline(user_input, max_length=50, num_return_sequences=1)
    except Exception as e:
        # Top-level boundary: report inference errors as JSON rather than crash.
        return jsonify({"error": str(e)}), 500
    # Guard the nested access too — an empty result list must not raise here.
    generated = output[0].get('generated_text') if output else None
    return jsonify({
        "input": user_input,
        "response": generated if generated else "No response generated"
    })
# Run the app with Flask's built-in development server.
# NOTE(review): debug=True enables the interactive debugger and auto-reload —
# never expose this in production; confirm this entry point is dev-only.
if __name__ == "__main__":
    app.run(debug=True)