from flask import Flask
from flask import request
from langchain_community.llms import Ollama

app = Flask(__name__)
llm = Ollama(model="phi3")


@app.route('/completion', methods=['POST'])
def completion():
    """Expected JSON payload:
    {
        "user": ...,
        "text": ...
    }
    """
    message = request.get_json()
    # Forward the prompt text to the local Ollama model and return its reply.
    llm_output = llm.invoke(message['text'])
    print(llm_output)
    return llm_output


if __name__ == '__main__':
    # Run on port 8000 so the curl example below works as written.
    app.run(port=8000)

# curl -v -X POST 'http://127.0.0.1:8000/completion' \
#   --header 'Content-Type: application/json' \
#   --data '{"user": "test-user", "text": "What do you know about 3D graphics"}'