import {getLlama, LlamaChatSession} from "node-llama-cpp"
import { PassThrough } from 'stream'
import {fileURLToPath} from "url"
import path from "path"

// ES modules have no built-in __dirname; reconstruct it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url))

const service = {
  // Singleton llama.cpp state: one model/context/session resident at a time.
  model: null,
  context: null,
  session: null,

  /**
   * Loads a GGUF model from the local `models/` directory, first disposing
   * any previously loaded session/context/model. No-op when the requested
   * model is already loaded.
   * @param {string} modelName - file name of the model inside `models/`
   * @throws {Error} when modelName is missing
   */
  load: async (modelName) => {
    if (!modelName) { throw new Error("model name is required") }
    if (service.model && service.model.filename === modelName) { return }
    // Dispose in reverse order of creation: session -> context -> model.
    if (service.session) {
      await service.session.dispose()
      service.session = null
    }
    if (service.context) {
      await service.context.dispose()
      service.context = null
    }
    if (service.model) {
      await service.model.dispose()
      service.model = null
    }
    console.log('load model', modelName)
    const llama = await getLlama()
    service.model = await llama.loadModel({
        modelPath: path.join(__dirname, "models", modelName)
    })
    service.context = await service.model.createContext()
    service.session = new LlamaChatSession({
      contextSequence: service.context.getSequence()
    })
  },

  /**
   * Runs a chat completion and returns a stream of OpenAI-style SSE chunks.
   * All messages except the last seed the chat history; the last message is
   * used as the prompt.
   * @param {string} sessionId - conversation id; when it differs from the
   *   current session's id, the chat history is replaced with `messages`
   * @param {string} modelName - model file to (lazily) load via `load()`
   * @param {Array<{role: string, content: string}>} messages - OpenAI-style chat messages
   * @returns {Promise<PassThrough>} SSE stream terminated by `data: [DONE]`
   */
  complete: async (sessionId, modelName, messages) => {
    await service.load(modelName)
    // Map OpenAI roles onto node-llama-cpp history items; unknown roles are dropped.
    const history = messages.slice(0, -1).map((m) => {
      if (m.role === "system") { return { type: "system", text: m.content } }
      if (m.role === "user") { return { type: "user", text: m.content } }
      if (m.role === "assistant") { return { type: "model", response: [m.content] } }
      return null
    }).filter(Boolean)
    const input = messages[messages.length - 1]
    if (service.session.id !== sessionId) {
      service.session.setChatHistory(history)
      service.session.id = sessionId
      console.log('change history', sessionId)
    }
    const stream = new PassThrough()
    service.session.prompt(input.content, {
      onTextChunk(chunk) {
        const data = { "choices": [{ "delta": { "content": chunk } }] }
        // SSE events must be terminated by a blank line ("\n\n"), otherwise
        // clients buffer forever waiting for the end of the event.
        stream.write(`data: ${JSON.stringify(data)}\n\n`)
      }
    }).then(() => {
      stream.write(`data: [DONE]\n\n`)
      stream.end()
    }).catch((err) => {
      // Without this handler a failed prompt leaves the stream open forever
      // and triggers an unhandled promise rejection.
      console.error('prompt failed', err)
      stream.destroy(err)
    })
    return stream
  },
}


export default service