import OpenAI from 'openai';
// Connection settings for the DashScope OpenAI-compatible endpoint.
// NOTE(review): assumes both env vars are set at startup; the SDK does not
// validate them here — confirm deployment configuration.
const dashscopeConfig = {
    baseURL: process.env.DASHSCOPE_BASE_URL,
    apiKey: process.env.DASHSCOPE_API_KEY,
};

// Shared module-level client instance reused by OpenAIService.
const openai = new OpenAI(dashscopeConfig);

/** A single chat message exchanged with the model. */
export type Message = {
    /** Originator of the message. */
    role: 'system' | 'user' | 'assistant';
    /** Plain-text body of the message. */
    content: string;
};

/** Parameters accepted by OpenAIService for a chat completion call. */
export type ChatRequest = {
    /** Model name; the service falls back to its own default when omitted. */
    model?: string;
    /** Conversation history to send, oldest first. */
    messages: Message[];
    /** When true, the streaming code path is used. */
    stream?: boolean;
    /** Sampling temperature; service default applies when omitted. */
    temperature?: number;
    /** Completion length cap; service default applies when omitted. */
    max_tokens?: number;
};

/**
 * Thin service wrapper around an OpenAI-compatible client (DashScope here)
 * that exposes chat completion as a plain string result, in both streaming
 * and non-streaming flavours.
 */
export class OpenAIService {
    aiClient: OpenAI;

    /**
     * @param client - OpenAI-compatible client to use. Defaults to the shared
     * module-level DashScope instance, so existing zero-arg callers are
     * unaffected; tests may inject a stub.
     */
    constructor(client: OpenAI = openai) {
        this.aiClient = client;
    }

    /**
     * Dispatch on `request.stream`: streaming when true, otherwise a single
     * blocking completion. Both paths resolve with the full response text.
     */
    async generate(request: ChatRequest): Promise<string> {
        return request.stream ? this.stream(request) : this.complete(request);
    }

    /** Fill in the service-wide defaults for model/temperature/max_tokens. */
    private withDefaults(request: ChatRequest) {
        const {
            model = 'qwen-max',
            messages,
            temperature = 0.9,
            max_tokens = 1024,
        } = request;
        return { model, messages, temperature, max_tokens };
    }

    /**
     * Streaming completion. Consumes the entire stream, echoing each delta
     * chunk to the console, and resolves with the concatenated text.
     * NOTE(review): the caller still only receives the result after the
     * stream finishes; expose the async iterator directly if true
     * incremental delivery is ever needed.
     * @throws Rethrows any API/stream error after logging it.
     */
    async stream(request: ChatRequest): Promise<string> {
        try {
            const stream = await this.aiClient.chat.completions.create({
                ...this.withDefaults(request),
                stream: true,
            });
            let result = '';
            for await (const chunk of stream) {
                // `??` (not `||`): only null/undefined fall back to ''.
                const content = chunk.choices[0]?.delta?.content ?? '';
                result += content;
                console.log(content); // echo incremental output, one chunk per line
            }
            return result;
        } catch (error) {
            console.error('Stream error:', error);
            throw error;
        }
    }

    /**
     * Non-streaming completion: one request, one response.
     * Returns '' when the API yields no message content.
     * @throws Rethrows any API error after logging it.
     */
    async complete(request: ChatRequest): Promise<string> {
        try {
            const completion = await this.aiClient.chat.completions.create({
                ...this.withDefaults(request),
                stream: false,
            });
            return completion.choices[0]?.message?.content ?? '';
        } catch (error) {
            console.error('Completion error:', error);
            throw error;
        }
    }
}

// Shared singleton instance for convenience imports across the app.
// NOTE(review): the name shadows the class's `aiClient` field (which holds
// the raw OpenAI client) — consider renaming one of them for clarity.
export const aiClient = new OpenAIService();