import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
import { OpenAI } from 'openai';

// SSE alternative (uncomment to use an HTTP/SSE MCP server instead):
// const sseTransport = new SSEClientTransport(new URL('http://localhost:3000/sse'));

// Stdio transport: spawns the MCP server as a child process and talks
// to it over stdin/stdout.
const serverParams = {
  command: 'node',
  args: ['./dist/main.js']
};
const transport = new StdioClientTransport(serverParams);


// MCP client identity reported to the server during the handshake.
const clientInfo = {
  name: 'my-mcp',
  version: '1.0.0'
};
const client = new Client(clientInfo);

// Connect over stdio (swap in the SSE transport above to use SSE instead):
// await client.connect(sseTransport);
await client.connect(transport);

// Fetch the MCP server's tool catalog and convert each entry into the
// OpenAI chat-completions `tools` schema (type: 'function' wrappers).
const tools_list = await client.listTools();

const tools = tools_list.tools.map(({ name, description, inputSchema }) => ({
  type: 'function',
  function: {
    name,
    description: description || '',
    parameters: {
      type: inputSchema.type,
      properties: inputSchema.properties,
      required: inputSchema.required
    }
  }
}));

// OpenAI-compatible client pointed at the SiliconFlow endpoint.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: 'https://api.siliconflow.cn/v1/'
});

/**
 * Conversation history; grown in place via push, never reassigned.
 * @type {Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>}
 */
const messages = [{ role: 'user', content: '现在几点,北京是什么天气' }];

// First round: let the model decide whether to call any of the MCP tools.
const response = await openai.chat.completions.create({
  model: 'Qwen/Qwen2.5-7B-Instruct',
  messages,
  tools,
  tool_choice: 'auto'
});

const assistantMessage = response.choices[0].message;
messages.push(assistantMessage);

// If the model answered directly (no tool calls), print the answer,
// shut down the MCP client, and stop here.
const requestedTools = assistantMessage.tool_calls;
if (!requestedTools || requestedTools.length === 0) {
  console.log("The model didn't use the function. Its response was:");
  console.log(assistantMessage.content);

  await client.close();
  process.exit(0);
}

if (response.choices[0].message.tool_calls) {
  // Execute each requested tool sequentially via the MCP client and feed
  // the result back into the conversation as a `tool` message.
  for (const toolCall of response.choices[0].message.tool_calls) {
    console.log('Tool call:', toolCall);
    const toolArgs = JSON.parse(toolCall.function.arguments);
    const toolResult = await client.callTool({
      name: toolCall.function.name,
      arguments: toolArgs
    });
    // NOTE(review): assumes the tool result's first content part is text —
    // confirm against the MCP server's tool implementations.
    const resultText = toolResult.content[0].text;
    console.log('Function response:', resultText);
    messages.push({
      role: 'tool',
      content: resultText,
      tool_call_id: toolCall.id
    });
  }
}

// Second round: stream the model's final answer, now that the tool
// results are in the conversation history.
const finalResponse = await openai.chat.completions.create({
  model: 'Qwen/Qwen2.5-7B-Instruct',
  messages,
  stream: true
});

console.log('\n');
for await (const chunk of finalResponse) {
  // Stream chunks may carry no `content` (role-only first chunk, or the
  // final finish_reason chunk has an empty delta). process.stdout.write
  // throws a TypeError on undefined, so fall back to ''.
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
}

await client.close();
