# Provenance (captured from the Hugging Face file viewer):
# DmitrMakeev's picture
# Duplicate from bsenst/flask_inference_api
# d4952da
# raw / history / blame
# 637 Bytes
import flask
from flask import request
import os
from dotenv import load_dotenv
# Load environment variables (e.g. PORT) from a local .env file, if one exists.
load_dotenv()
# Serve templates from the repo root (index.html sits next to this file),
# not the default ./templates directory.
app = flask.Flask(__name__, template_folder="./")
from transformers import pipeline
# Text-classification pipeline; NOTE(review): built at import time, so startup
# blocks until the model is downloaded/loaded.
classifier = pipeline('text-classification', model="bsenst/classify_services_model")
@app.route('/')
def index():
    """Serve the landing page containing the input form."""
    page = flask.render_template('index.html')
    return page
@app.route("/", methods=["POST"])
def predict():
incoming = request.get_json()
print(incoming)
prediction = classifier(incoming["text"])[0]
print(prediction)
return prediction
if __name__ == '__main__':
    # Bind to all interfaces; default to 7860 (Hugging Face Spaces convention)
    # unless a PORT environment variable overrides it.
    port = int(os.environ.get('PORT', 7860))
    app.run(host='0.0.0.0', port=port)