<script setup>
import { Ollama } from 'ollama'

/**
 * Send a single chat prompt to a locally running Ollama server and log the
 * streamed reply.
 *
 * Bug fix: with `stream: true`, `ollama.chat()` resolves to an async iterable
 * of partial chunks, NOT a single response object — the original code read
 * `response.message.content` directly, which is `undefined` on a stream.
 * We now consume the stream with `for await` and accumulate the text.
 *
 * @returns {Promise<string|undefined>} the full reply text, or `undefined`
 *   when the request fails (the error is logged, not rethrown).
 */
async function fetchResponse() {
  const ollama = new Ollama({ host: 'http://127.0.0.1:11434' })
  const prompt = '你叫什么名字?'
  try {
    const stream = await ollama.chat({
      model: 'deepseek-r1:7b',
      messages: [{ role: 'user', content: prompt }],
      stream: true,
    })
    // Each chunk carries a fragment of the assistant message; concatenate
    // them into the complete reply.
    let fullText = ''
    for await (const part of stream) {
      fullText += part.message.content
    }
    console.log(fullText)
    return fullText
  } catch (error) {
    // e.g. Ollama server not running / model not pulled — report on the
    // error channel instead of a cryptic `console.log('eee:', …)`.
    console.error('Ollama chat request failed:', error)
  }
}
fetchResponse()
</script>

<template>
  <!-- NOTE(review): a hard-coded API key ("sk-…") was committed here; it has been redacted. Rotate the exposed key and load secrets from environment/config instead of source. -->
  <div>12</div>
</template>

<style scoped></style>
