import os

# Point the Hugging Face cache at app-local, writable directories before
# transformers is imported so the default cache locations pick them up.
os.environ["HF_HOME"] = "/app/hf_home"
os.environ["TRANSFORMERS_CACHE"] = "/app/cache"

from flask import Flask, request, jsonify
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Local directory used to persist the model across restarts, and the
# Hugging Face Hub id to download from on a cold start.
local_path = "./models/roberta-large"
model_id = "klue/roberta-large"

if os.path.exists(local_path):
    print("Loading model from local path...")
    model = AutoModelForSequenceClassification.from_pretrained(local_path)
    tokenizer = AutoTokenizer.from_pretrained(local_path)
else:
    print("Downloading model from the Hugging Face Hub...")
    model = AutoModelForSequenceClassification.from_pretrained(
        model_id, cache_dir=os.environ["HF_HOME"]
    )
    tokenizer = AutoTokenizer.from_pretrained(model_id, cache_dir=os.environ["HF_HOME"])
    # Persist the downloaded weights so later starts can load locally.
    os.makedirs(local_path, exist_ok=True)
    model.save_pretrained(local_path)
    tokenizer.save_pretrained(local_path)
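
# Sketch: how the loaded model and tokenizer could serve a prediction.
# Assumes the checkpoint provides a usable sequence-classification head;
# the bare klue/roberta-large weights initialize that head randomly, so
# the probabilities below are illustrative only.
import torch


def classify(text: str) -> dict:
    # Tokenize the input and run a forward pass without gradient tracking.
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        logits = model(**inputs).logits
    # Convert logits to per-class probabilities keyed by label index.
    probs = torch.softmax(logits, dim=-1).squeeze(0)
    return {str(i): float(p) for i, p in enumerate(probs)}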

app = Flask(__name__)

print("Model load complete")

@app.route("/generate", methods=["GET"])
def generate():
    return jsonify({"result": "generate/get"})

@app.route("/generate", methods=["POST"])
def generate_post():
    data = request.json
    print(data)
    return jsonify({"result": "generate/post"})
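
# Example request against the POST route above (assumes the server runs
# locally on port 7860; the payload shape is illustrative only, since the
# handler currently just logs the JSON body and returns a fixed response):
#
#   curl -X POST http://localhost:7860/generate \
#        -H "Content-Type: application/json" \
#        -d '{"text": "an example sentence"}'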

@app.route("/", methods=["GET"])
def index():
    return jsonify({"result": "success"})

if __name__ == "__main__":
    # Listen on all interfaces; 7860 is the default port expected by
    # Hugging Face Spaces.
    app.run(host="0.0.0.0", port=7860)
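
# Local run sketch (the file name app.py is an assumption):
#   python app.py
#   curl http://localhost:7860/          -> {"result": "success"}
#   curl http://localhost:7860/generate  -> {"result": "generate/get"}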