package llmserviceopenai.routes

import io.ktor.server.application.call
import io.ktor.server.request.receive
import io.ktor.server.response.respond
import io.ktor.server.routing.*
import llmserviceopenai.model.EmbeddingRequest
import llmserviceopenai.service.EmbeddingService


/**
 * Registers the embedding endpoint on this [Route].
 *
 * POST /v1/embedding — deserializes the request body into an [EmbeddingRequest],
 * delegates to [EmbeddingService.embeddingHandler], and responds with its result.
 */
fun Route.embeddingRoutes(embeddingService: EmbeddingService) {
    post("/v1/embedding") {
        // Deserialize the incoming JSON body into the request model.
        val payload = call.receive<EmbeddingRequest>()
        // Delegate to the service layer and send its result back to the client.
        call.respond(embeddingService.embeddingHandler(payload))
    }
}
