import { BaseModelProvider } from "./base";
import OpenAI from "openai";
import axios, { Axios } from "axios";
import { ToolProviderFactory } from "../tool";
import { ModelInfo, ModelMesssage, ChunkMessage, UserBlance } from "./types";

/**
 * Model provider for the DeepSeek chat API (OpenAI-compatible).
 *
 * `init` must be called before `createMessage` / `getUserBlance`; until then
 * the client fields are undefined — `createMessage` yields an error chunk and
 * `getUserBlance` resolves to undefined.
 */
export class DeepseekModelProvider extends BaseModelProvider {
	openai?: OpenAI;
	info?: ModelInfo;
	api?: Axios;
	// NOTE(review): keeps the original (misspelled) public identifier
	// "systemPropmt" — renaming would break external callers.
	systemPropmt = '';

	/**
	 * Configures both clients from the given model settings: the OpenAI SDK
	 * client for chat completions, and a raw axios client for endpoints the
	 * SDK does not cover (e.g. /user/balance).
	 */
	init(modelInfo: ModelInfo): void {
		this.openai = new OpenAI({
			apiKey: modelInfo.apiKey,
			baseURL: modelInfo.baseUrl
		});
		this.api = axios.create({
			baseURL: modelInfo.baseUrl,
			headers: {
				'Accept': 'application/json',
				'Authorization': `Bearer ${modelInfo.apiKey}`
			}
		});
		this.info = modelInfo;
	}

	/**
	 * Streams a chat completion for the given conversation, yielding chunks
	 * as they arrive: 'text' for content deltas, 'reasoning' for DeepSeek's
	 * reasoning_content deltas, 'tool_calls' for tool-call deltas, 'usage'
	 * for the final usage record, and 'error' when no client is initialized.
	 */
	async *createMessage(messages: ModelMesssage[]): AsyncGenerator<ChunkMessage> {
		// Fix: type the params as the *streaming* variant so the SDK's
		// overload resolution returns an async-iterable Stream rather than
		// the ChatCompletion | Stream union (which the for-await below
		// would not type-check against).
		const params: OpenAI.Chat.ChatCompletionCreateParamsStreaming = {
			model: this.info?.model ?? 'deepseek-chat',
			temperature: parseFloat(this.info?.temperature ?? '0'),
			messages: this.formatMessages(messages),
			stream: true,
			stream_options: { include_usage: true }
		};
		// Fix: the original always sent max_completion_tokens, defaulting to
		// parseInt('0') = 0, which is below the API's minimum of 1. Only send
		// the cap when it is configured to a positive integer.
		const maxTokens = parseInt(this.info?.maxTokens ?? '', 10);
		if (Number.isInteger(maxTokens) && maxTokens > 0) {
			params.max_completion_tokens = maxTokens;
		}
		// Fix: only attach `tools` when at least one tool is registered —
		// some OpenAI-compatible endpoints reject an empty tools array.
		const tools = ToolProviderFactory.listTool();
		if (tools.length > 0) {
			params.tools = tools.map((item) => ({
				type: 'function' as const,
				function: {
					name: item.name,
					description: item.description,
					parameters: item.parameters
				}
			}));
		}
		const stream = await this.openai?.chat.completions.create(params);
		if (!stream) {
			// init() has not been called; surface a single error chunk.
			yield {
				type: 'error',
				content: '调用失败'
			} as ChunkMessage;
			return;
		}
		for await (const chunk of stream) {
			const delta = chunk.choices[0]?.delta;
			if (delta?.content) {
				yield {
					type: 'text',
					content: delta.content
				};
			}
			// DeepSeek extends the delta with a non-standard reasoning_content
			// field; the 'in' check narrows without an unsafe cast.
			if (delta && 'reasoning_content' in delta && delta.reasoning_content) {
				yield {
					type: 'reasoning',
					reasoning: (delta.reasoning_content as string | undefined) ?? ''
				};
			}
			if (delta?.tool_calls) {
				yield {
					type: 'tool_calls',
					tool_calls: delta.tool_calls.map((item) => ({
						id: item.id,
						type: item.type,
						index: item.index,
						function: {
							name: item.function?.name,
							arguments: item.function?.arguments
						}
					}))
				};
			}
			// With include_usage, the final chunk carries the usage record.
			if (chunk.usage) {
				const usage = chunk.usage as DeepseekUsage;
				yield {
					type: 'usage',
					usage: {
						prompt_tokens: usage.prompt_tokens,
						completion_tokens: usage.completion_tokens,
						prompt_cache_hit_tokens: usage.prompt_cache_hit_tokens,
						completion_cache_hit_tokens: usage.completion_cache_hit_tokens
					}
				};
			}
		}
	}

	/**
	 * Fetches the account balance from DeepSeek's /user/balance endpoint.
	 * Resolves to undefined when the client is uninitialized or the response
	 * carries no balance entries.
	 */
	async getUserBlance(): Promise<UserBlance | undefined> {
		const res = await this.api?.get('/user/balance');
		// Fix: guard against a missing or empty balance_infos array instead
		// of crashing on balance_infos[0].
		const infos = res?.data?.balance_infos;
		if (!infos || infos.length === 0) {
			return undefined;
		}
		return {
			balance: infos[0].balance,
			currency: infos[0].currency,
			available: res.data.is_available
		} as UserBlance;
	}

	/**
	 * Projects internal messages down to the { role, content } shape the
	 * OpenAI chat API expects.
	 */
	formatMessages(messages: ModelMesssage[]): OpenAI.Chat.ChatCompletionMessageParam[] {
		return messages.map((message) => ({
			role: message.role,
			content: message.content
		}) as OpenAI.Chat.ChatCompletionMessageParam);
	}
}

/**
 * Usage payload extended with DeepSeek's prompt-cache accounting fields.
 *
 * NOTE(review): DeepSeek's public API documents `prompt_cache_hit_tokens`
 * and `prompt_cache_miss_tokens`; `completion_cache_hit_tokens` does not
 * appear in the published reference — verify against a live response
 * (possibly intended as `prompt_cache_miss_tokens`).
 */
interface DeepseekUsage extends OpenAI.CompletionUsage {
	// Prompt tokens served from the context cache (billed at the hit rate).
	prompt_cache_hit_tokens?: number;
	// See NOTE above — field name unconfirmed against the DeepSeek API.
	completion_cache_hit_tokens?: number;
}