ai / backend /open_webui /utils /response.py
github-actions[bot]
GitHub deploy: d48a234bb91a479b618f7828665bdfa45fdc349b
d836a53
raw
history blame contribute delete
982 Bytes
import json
from open_webui.utils.misc import (
openai_chat_chunk_message_template,
openai_chat_completion_message_template,
)
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
    """Translate a non-streaming Ollama chat response into the OpenAI
    chat-completion message format.

    Falls back to model name "ollama" and empty content when the
    corresponding keys are absent from the response dict.
    """
    model_name = ollama_response.get("model", "ollama")
    content = ollama_response.get("message", {}).get("content", "")
    return openai_chat_completion_message_template(model_name, content)
async def convert_streaming_response_ollama_to_openai(ollama_streaming_response):
    """Re-emit an Ollama streaming chat response as OpenAI-style SSE chunks.

    Each item from the response's ``body_iterator`` is decoded from JSON,
    repackaged through the OpenAI chunk template, and yielded as a
    ``data: ...`` server-sent-event line. Once the source iterator is
    exhausted, a final ``data: [DONE]`` sentinel terminates the stream.
    """
    async for raw_chunk in ollama_streaming_response.body_iterator:
        chunk = json.loads(raw_chunk)
        is_done = chunk.get("done", False)
        content = chunk.get("message", {}).get("content", "")
        # The final chunk carries no delta content in the OpenAI format.
        openai_chunk = openai_chat_chunk_message_template(
            chunk.get("model", "ollama"),
            None if is_done else content,
        )
        yield f"data: {json.dumps(openai_chunk)}\n\n"
    yield "data: [DONE]\n\n"