import { ChatOpenAI } from '@langchain/openai';
import { FRIDAY_LLM_CONFIG } from './constants';
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { UseMcpServer } from './UseMcpServer';

// NOTE(review): empty placeholder class — presumably reserved for a future
// MCP server implementation. Confirm it is still needed; the actual MCP tool
// access goes through UseMcpServer (imported above), not this class.
export class MCPServer {}

/**
 * Static factory for LLM clients and ReAct agents backed by the Friday
 * LLM gateway (an OpenAI-compatible endpoint configured via
 * FRIDAY_LLM_CONFIG).
 */
class UseOpenAi {
  /** Static-only utility class — not instantiable. */
  private constructor() {}

  /**
   * Build a ChatOpenAI client pointed at the Friday LLM gateway.
   *
   * @param modelName - model identifier passed through to the gateway;
   *   defaults to `'anthropic.claude-sonnet-4'`
   * @returns a configured ChatOpenAI instance
   */
  static getLLM(modelName = 'anthropic.claude-sonnet-4'): ChatOpenAI {
    const llm = new ChatOpenAI({
      verbose: true, // log request/response details to the console
      model: modelName,
      temperature: 0.9,
      configuration: {
        baseURL: FRIDAY_LLM_CONFIG.API_BASE_URL,
        apiKey: FRIDAY_LLM_CONFIG.API_KEY,
      },
      maxTokens: 8000,
      // To force JSON output, enable:
      // modelKwargs: {
      //   response_format: { type: 'json_object' },
      // },
    });
    // Response-format options are documented in
    // node_modules/openai/src/resources/shared.ts; alternatively:
    // llm.withConfig({ response_format: { type: 'json_object' } });

    return llm;
  }

  /**
   * Create a ReAct agent wired to the Friday LLM and the MCP tool set
   * exposed by UseMcpServer.
   *
   * @param modelName - optional model identifier; when omitted, getLLM's
   *   default model is used (keeps both factories consistent)
   * @returns the compiled ReAct agent
   */
  static async getAgent(modelName?: string) {
    // Passing undefined lets getLLM apply its own default model name,
    // so the default lives in exactly one place.
    const llm = this.getLLM(modelName);
    const tools = await UseMcpServer.getTools();
    const agent = createReactAgent({ llm, tools });
    return agent;
  }
}

export { UseOpenAi };
