from fastapi import HTTPException

from api.config import MODEL_PROVIDER_MAPPING
from api.models import ChatRequest
from api.provider import blackboxai, gizai


async def process_streaming_response(request: ChatRequest):
    """Dispatch a streaming chat request to the provider mapped to the requested model."""
    provider_name = MODEL_PROVIDER_MAPPING.get(request.model)
    if provider_name == 'blackboxai':
        return await blackboxai.process_streaming_response(request)
    elif provider_name == 'gizai':
        return await gizai.process_streaming_response(request)
    else:
        # Unknown or unmapped model: reject the request with a client error.
        raise HTTPException(status_code=400, detail=f"Model {request.model} is not supported for streaming.")


async def process_non_streaming_response(request: ChatRequest):
    """Dispatch a non-streaming chat request to the provider mapped to the requested model."""
    provider_name = MODEL_PROVIDER_MAPPING.get(request.model)
    if provider_name == 'blackboxai':
        return await blackboxai.process_non_streaming_response(request)
    elif provider_name == 'gizai':
        return await gizai.process_non_streaming_response(request)
    else:
        # Unknown or unmapped model: reject the request with a client error.
        raise HTTPException(status_code=400, detail=f"Model {request.model} is not supported.")
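
# Hedged usage sketch (not part of the original module): one way these dispatchers
# might be wired into a FastAPI route. The router, the path, and a `stream` flag on
# ChatRequest are illustrative assumptions, not confirmed by this file.
#
# from fastapi import APIRouter
#
# router = APIRouter()
#
# @router.post("/v1/chat/completions")
# async def chat_completions(request: ChatRequest):
#     if request.stream:
#         return await process_streaming_response(request)
#     return await process_non_streaming_response(request)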