import { ChatOpenAI } from 'langchain/chat_models'
import { Configuration } from "openai";
import {HumanMessage} from 'langchain/schema'


// SECURITY: the API key was previously hard-coded here. A secret committed to
// source control must be treated as leaked — revoke it and supply a fresh key
// via the environment instead.
const apiKey = process.env.OPENAI_API_KEY ?? '';

// Streaming chat model pointed at an OpenAI-compatible proxy endpoint.
const model = new ChatOpenAI(
    {
        streaming: true,
        openAIApiKey: apiKey,
        modelName: 'gpt-3.5-turbo',
    },
    new Configuration({
        apiKey,
        // Proxy that mirrors the OpenAI REST API surface.
        basePath: 'https://api.aiproxy.io/v1',
    }),
)
// Streams the completion token-by-token: each new token is logged as a
// { status, text } record, and the fully aggregated response message is
// logged once the call resolves.
const streamLogger = {
    handleLLMNewToken(token: string) {
        console.log({ status: 'success', text: token });
    },
};

const response = await model.call(
    [
        new HumanMessage(
            "改写下面的文章：表示属性是可枚举的，即它会在对象的迭代中出现。"
        ),
    ],
    { callbacks: [streamLogger] },
)
console.log(response);
