// --- GPT API smoke test ---------------------------------------------------
const OpenAI = require('openai');

const http = require('http');
const { HttpsProxyAgent } = require('https-proxy-agent');

console.log('proxy url:', process.env.PROXY_URL);

// SECURITY FIX: never commit API keys to source control — read the key from
// the environment instead (the original had a literal `sk-...` key here,
// which must now be considered compromised and rotated).
const client = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY,
    // Only attach a proxy agent when a proxy is actually configured;
    // `new HttpsProxyAgent(undefined)` throws at startup otherwise.
    httpAgent: process.env.PROXY_URL
        ? new HttpsProxyAgent(process.env.PROXY_URL)
        : undefined,
    timeout: 60 * 1000 * 5, // 5 minutes
});

// Override the agent per-request (keep-alive disabled for this one call).
// BUG FIX: the original called `openai.models.list(...)`, but no `openai`
// variable exists — the client is named `client` — so the script crashed
// with a ReferenceError before `main` ever ran. Also handle the rejection
// so a failed list call cannot become an unhandled promise rejection.
client.models
    .list({ httpAgent: new http.Agent({ keepAlive: false }) })
    .catch((err) => console.error('models.list failed:', err));

/**
 * Run a tool-calling chat completion: the model may invoke
 * `getCurrentLocation` / `getWeather`, every intermediate message is logged,
 * and the final assistant content is printed at the end.
 */
async function main() {
  // Tool definitions handed to the runner; `parse` turns the model's JSON
  // argument string into an object before the tool function is called.
  const toolDefinitions = [
    {
      type: 'function',
      function: {
        function: getCurrentLocation,
        parameters: { type: 'object', properties: {} },
      },
    },
    {
      type: 'function',
      function: {
        function: getWeather,
        parse: JSON.parse, // or use a validation library like zod for typesafe parsing.
        parameters: {
          type: 'object',
          properties: {
            location: { type: 'string' },
          },
        },
      },
    },
  ];

  const runner = client.beta.chat.completions
    .runTools({
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: '你好' }],
      tools: toolDefinitions,
    })
    .on('message', (message) => console.log(message));

  const finalContent = await runner.finalContent();
  console.log();
  console.log('Final content:', finalContent);
}

/**
 * Tool: report the user's current location.
 * @returns {Promise<string>} a hard-coded city name (simulated lookup).
 */
async function getCurrentLocation() {
  const simulatedCity = 'Boston';
  return simulatedCity;
}

/**
 * Tool: look up the weather for a location.
 *
 * BUG FIX: the original returned `{ temperature, precipitation }` with both
 * identifiers undeclared, so every invocation of this tool threw a
 * ReferenceError. Return concrete simulated values instead, echoing the
 * requested location alongside them.
 *
 * @param {{ location: string }} args - parsed tool-call arguments.
 * @returns {Promise<{location: string, temperature: number, precipitation: string}>}
 *   simulated weather report.
 */
async function getWeather(args) {
  const { location } = args;
  // … a real implementation would query a weather service here …
  return { location, temperature: 72, precipitation: 'none' };
}

// Entry point. BUG FIX: the original `main();` left the promise floating —
// any API failure became an unhandled rejection. Log the error and set a
// non-zero exit code instead.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

// --- end GPT API smoke test ------------------------------------------------