;;; guile-openai --- An OpenAI API client for Guile
;;; Copyright © 2023 Andrew Whatson <whatson@tailcall.au>
;;;
;;; This file is part of guile-openai.
;;;
;;; guile-openai is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU Affero General Public License as
;;; published by the Free Software Foundation, either version 3 of the
;;; License, or (at your option) any later version.
;;;
;;; guile-openai is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;;; Affero General Public License for more details.
;;;
;;; You should have received a copy of the GNU Affero General Public
;;; License along with guile-openai.  If not, see
;;; <https://www.gnu.org/licenses/>.

;; Public chat-completion interface for guile-openai: default
;; parameters, the <Chat> record accessors, and `openai-chat'.
(define-module (openai chat)
  #:use-module (openai api chat)
  #:use-module (openai client)
  #:use-module (openai utils colorized)
  #:use-module (ice-9 match)
  #:use-module (srfi srfi-9)
  #:use-module (srfi srfi-9 gnu)
  #:use-module (srfi srfi-41)
  #:export (openai-default-chat-model
            openai-default-chat-temperature
            openai-default-chat-top-p

            chat?
            chat-content
            chat-stream

            openai-chat))

;; Model used when `openai-chat' is called without #:model.
(define-once openai-default-chat-model
  (make-parameter 'gpt-3.5-turbo))

;; Default sampling temperature; *unspecified* means the field is
;; left out of the request so the server default applies.
(define-once openai-default-chat-temperature
  (make-parameter *unspecified*))

;; Default nucleus-sampling value; *unspecified* omits the field.
(define-once openai-default-chat-top-p
  (make-parameter *unspecified*))

;; Whether responses are streamed by default (see `openai-chat').
(define-once openai-default-chat-stream?
  (make-parameter #t))

;; A chat completion result.  CONTENT is a promise for the complete
;; response text; STREAM is a SRFI-41 stream of content chunks, which
;; yields text incrementally when the request was streamed.
(define-record-type <Chat>
  (%make-chat content stream)
  chat?
  (content %chat-content)               ; promise — force via `chat-content'
  (stream chat-stream))

;; Return the full content string of a chat, forcing the promise
;; stored in the record (and thus draining the stream if needed).
(define chat-content
  (compose force %chat-content))

;; Build N <Chat> records from RESULT and return them as multiple
;; values.  RESULT is either a SRFI-41 stream of partial responses
;; (streaming mode) or a single complete response; in both cases each
;; chat carries a content promise and a content stream.
(define (make-chats result n)
  (define (chat-for index)
    (if (stream? result)
        ;; Streaming: the full content is the concatenation of all
        ;; chunks for this choice index, computed lazily.
        (let ((pieces (responses->content-stream result index)))
          (%make-chat (delay (string-concatenate (stream->list pieces)))
                      pieces))
        ;; Non-streaming: expose the single content string as a
        ;; one-element stream; `stream' keeps the `force' lazy.
        (let ((text (delay (response->content result index))))
          (%make-chat text (stream (force text))))))
  (apply values (map chat-for (iota n))))

;; Extract the message content string of choice N from a complete
;; (non-streamed) chat RESPONSE.
(define (response->content response n)
  (let* ((choice  (list-ref (chat-response-choices response) n))
         (message (chat-choice-message choice)))
    (chat-message-content message)))

;; Turn a stream of streamed chat RESPONSES into a lazy stream of
;; content strings belonging to choice index N.  Chunks for other
;; indices and chunks whose delta has no content are skipped.
;; NOTE(review): assumes each streamed response carries exactly one
;; choice (hence `car') — confirm against the API layer.
(define (responses->content-stream responses n)
  (stream-let loop ((responses responses))
    (if (stream-null? responses)
        stream-null
        (let* ((response (stream-car responses))
               (choice   (car (chat-response-choices response)))
               (index    (chat-choice-index choice))
               (delta    (chat-choice-delta choice))
               (content  (chat-message-content delta)))
          (cond ((not (eqv? index n))        ;; ignore wrong index
                 (loop (stream-cdr responses)))
                ((unspecified? content)      ;; ignore unspecified content
                 (loop (stream-cdr responses)))
                (else
                 (stream-cons content (loop (stream-cdr responses)))))))))

;; Printer for <Chat> values: replay the content stream onto PORT so
;; that streamed responses render incrementally as chunks arrive.
(define (print-chat obj out)
  (newline out)
  (stream-for-each (lambda (chunk)
                     (display chunk out))
                   (chat-stream obj)))

;; Install the streaming printer so chats display their text at the REPL.
(set-record-type-printer! <Chat> print-chat)

;; Colorize chat output at the REPL.  NOTE(review): `color-stream'
;; presumably comes from (openai utils colorized) — confirm.
(add-color-scheme! `((,chat? CHAT ,color-stream (GREEN BOLD))))

;; Normalize a user-supplied prompt into a list of chat messages: a
;; list is parsed element-wise, anything else becomes a single message.
(define (parse-prompt prompt)
  (if (pair? prompt)
      (map parse-message prompt)
      (list (parse-message prompt))))

;; Parse one prompt element into a chat message.  A bare string is a
;; user message; a `(role . content)' pair with role `system', `user'
;; or `assistant' keeps its role.  Anything else raises a match error.
(define (parse-message msg)
  (match msg
    ((? string? text)
     (make-chat-message "user" text))
    (((and role (or 'system 'user 'assistant)) . (? string? text))
     (make-chat-message (symbol->string role) text))))

(define* (openai-chat prompt #:key
                      (model             (openai-default-chat-model))
                      (temperature       (openai-default-chat-temperature))
                      (top-p             (openai-default-chat-top-p))
                      (n                 *unspecified*)
                      (stream?           (openai-default-chat-stream?))
                      (stop              *unspecified*)
                      (max-tokens        *unspecified*)
                      (presence-penalty  *unspecified*)
                      (frequency-penalty *unspecified*)
                      (logit-bias        *unspecified*)
                      (user              (openai-default-user)))
  "Send a chat completion request.  Returns a chat record.

The PROMPT can be a string, which will be sent as a user message.
Alternatively, prompt can be a list of `(role . content)' pairs, where
content is a string and role is a symbol `system', `user', or
`assistant'.

The keyword arguments correspond to the request parameters described
in the chat completion request documentation:

#:n - The number of responses to generate, returned as multiple
values.

#:stream? - Whether to stream the response(s), defaults to `#t'.

#:model - A symbol or string identifying the model to use.  Defaults
to `gpt-3.5-turbo', but if you're lucky you might be able to use
`gpt-4' or `gpt-4-32k' here.

#:temperature - The sampling temperature to use, a number between 0
and 2.

#:top-p - An alternative sampling parameter, a number between 0 and 1.

#:user - An optional username to associate with this request.

The `#:stop', `#:max-tokens', `#:logit-bias', `#:presence-penalty',
`#:frequency-penalty' parameters are implemented but untested."
  ;; Model symbols are convenience sugar; the wire format wants a string.
  (let* ((model (if (symbol? model) (symbol->string model) model))
         (prompt (parse-prompt prompt))
         ;; #f is coerced to *unspecified* so the stream field is
         ;; omitted from the request entirely.  NOTE(review): this
         ;; relies on the server defaulting to non-streaming when the
         ;; field is absent — confirm.
         (stream? (or stream? *unspecified*))
         (request (make-chat-request model prompt
                                     temperature top-p n stream? stop max-tokens
                                     presence-penalty frequency-penalty logit-bias user))
         (response (send-chat-request request)))
    ;; An unspecified #:n means the API produced a single choice.
    (make-chats response (if (unspecified? n) 1 n))))
