# Azure OpenAI implementation
from fastapi.responses import StreamingResponse
from openai import AzureOpenAI
from aiClients.baseAiClient import BaseAIClient, ModelInfo

class AzureOpenAiClient(BaseAIClient):
    """Azure OpenAI chat-completions client built on the synchronous ``openai`` SDK."""

    def __init__(self, model_info: ModelInfo):
        # model_info supplies credentials (key, version, base_url) and
        # per-request defaults (model, max_tokens, temperature, streaming flag).
        self.client = AzureOpenAI(
            api_key=model_info.key,
            api_version=model_info.version,
            azure_endpoint=model_info.base_url,
        )
        self.model_info = model_info

    def query(self, messages, **kwargs):
        """Send *messages* to the Azure chat-completions endpoint.

        Extra keyword arguments are forwarded verbatim to
        ``chat.completions.create``.

        Returns:
            When ``model_info.streaming`` is true, a ``StreamingResponse``
            emitting SSE ``data:`` lines; otherwise the assistant reply text
            as a plain string.
        """
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model_info.model,
            max_tokens=self.model_info.max_tokens,
            temperature=self.model_info.temperature,
            stream=self.model_info.streaming,
            **kwargs,
        )
        # BUG FIX: an unconditional ``return response`` here previously made
        # the branch below unreachable, so callers received the raw SDK
        # object instead of the SSE wrapper / extracted message text.
        if self.model_info.streaming:
            # BUG FIX: ``AzureOpenAI`` is the sync client, so the stream must
            # be consumed with a plain ``for`` loop — the previous
            # ``async for`` over a sync iterator would raise TypeError.
            # Starlette's StreamingResponse accepts sync generators.
            def stream_generator():
                for chunk in response:
                    # Skip chunks with no choices (e.g. content-filter
                    # results) or a None delta (role/final chunks), which
                    # would otherwise emit a literal "data: None" line.
                    if chunk.choices and chunk.choices[0].delta.content is not None:
                        yield f"data: {chunk.choices[0].delta.content}\n\n"

            return StreamingResponse(stream_generator(), media_type="text/event-stream")
        else:
            return response.choices[0].message.content
    

