"""Flask front end for the HuggingFace hosted sentiment-analysis API.

Routes (both on '/'):
    GET  -> smoke test: classify a fixed sentence, return raw JSON.
    POST -> classify submitted text and render ``result.html``.
"""

import os
import re
from collections import OrderedDict

import numpy as np
import pandas as pd
import requests
import torch
import transformers
from flask import Flask, jsonify, make_response, render_template, request
from huggingface_hub import cached_download
from torch import nn

app = Flask(__name__)

# SECURITY(review): the bearer token was previously hard-coded in source.
# Prefer the HF_API_TOKEN environment variable; the old literal is kept only
# as a fallback so existing deployments keep working. Rotate that token.
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "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")
headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}
API_URL = "https://api-inference.huggingface.co/models/nlptown/bert-base-multilingual-uncased-sentiment"

# Registry of selectable model endpoints. NOTE(review): `dictOfModels` was
# referenced in predict() but never defined in the original file, which made
# the POST route raise NameError — defined here so the route can run.
# Confirm the intended set of models against the form's "model_choice" options.
dictOfModels = {"BERT": API_URL}


def query(payload, model_url=API_URL):
    """POST *payload* to the hosted inference endpoint and return parsed JSON.

    Args:
        payload: JSON-serializable body, e.g. ``{"inputs": "some text"}``.
        model_url: endpoint to call; defaults to the module-level API_URL,
            so existing callers are unaffected.

    Raises:
        requests.HTTPError: on non-2xx responses — previously an error page
            would have been fed straight into ``.json()``.
    """
    response = requests.post(model_url, headers=headers, json=payload)
    response.raise_for_status()
    return response.json()


def get_prediction(message, model_url):
    """Return a list of ``{"label": ..., "score": ...}`` dicts for *message*.

    NOTE(review): this helper was called but not defined in the original
    file; implemented as a thin wrapper over ``query``. The hosted
    text-classification pipeline commonly nests results one level deep
    (``[[{...}]]``), so unwrap defensively — confirm against the live API.
    """
    results = query({"inputs": message}, model_url)
    if results and isinstance(results[0], list):
        return results[0]
    return results


@app.route('/', methods=['GET'])
def get():
    """Smoke-test endpoint: classify a fixed sentence, return the raw JSON."""
    data = query({"inputs": "The movie is good"})
    return data


@app.route('/', methods=['POST'])
def predict():
    """Classify the submitted text and render the result template."""
    # The original hard-coded the message as a debugging leftover; use the
    # form field when present, keeping the old literal as the fallback.
    message = request.form.get('message', "This is good movies")
    results = get_prediction(message, dictOfModels['BERT'])
    print(f'User selected model : {request.form.get("model_choice")}')
    my_prediction = (
        f'The feeling of this text is {results[0]["label"]} '
        f'with probability of {results[0]["score"]*100}%.'
    )
    return render_template('result.html', text=f'{message}', prediction=my_prediction)