<?php

namespace thinkct\library;

use app\admin\model\ai\model\Key as keyModel;
use app\admin\model\ai\Model as modelModel;
use orhanerday\OpenAi;

class ChatAI
{
    // Accumulated assistant reply text, built up from the SSE delta chunks.
    private string $last = '';

    /**
     * Send a chat request to the OpenAI-compatible endpoint and relay the
     * response to the browser as a server-sent-event stream.
     *
     * @param string|array  $content user message text, or a prebuilt messages array
     * @param string        $prompt  system prompt; a default is used when empty
     * @param object|string $model   key record to use; auto-resolved when empty
     * @return string the full assistant reply accumulated from the stream
     * @throws \RuntimeException when no usable model/key can be resolved
     */
    public function sendText($content, $prompt = '', $model = '')
    {
        if (is_array($content)) {
            $messages = $content;
        } else {
            $messages = [
                // BUGFIX: the original used `$prompt ?? ...`, but $prompt
                // defaults to '' (never null), so the fallback was unreachable.
                // `?:` treats the empty string as "no prompt given".
                ['role' => 'system', 'content' => $prompt ?: '你是一个智能机器人'],
                ['role' => 'user', 'content' => $content],
            ];
        }

        // Resolve the default model plus a random enabled key when the caller
        // did not supply one.
        if (empty($model)) {
            $model = $this->getModelKey();
        }

        // BUGFIX: fail fast with a clear message instead of a fatal error on
        // `null->key` when no enabled key matches the model.
        if (empty($model)) {
            throw new \RuntimeException('No available model key');
        }

        // NOTE(review): `use orhanerday\OpenAi;` looks like it imports the
        // package namespace rather than the OpenAi class (upstream class is
        // orhanerday\OpenAi\OpenAi) — confirm the alias resolves in this project.
        $open_ai = new OpenAi($model->key);

        // Use the key's custom endpoint when one is configured.
        if ($model->api_url) {
            $open_ai->setBaseURL($model->api_url);
        }

        $data = [
            'model' => $model->model,
            'messages' => $messages,
            'stream' => true,
        ];

        // Vision-style requests (a file upload is present) only take
        // max_tokens; the sampling knobs are sent for plain text chat.
        if (request()->post('file_list')) {
            // Cast added for consistency with the text branch below.
            $data['max_tokens'] = (int) input('max_tokens', 4000);
        } else {
            $data['temperature'] = floatval(input('temperature', 0.6));
            $data['frequency_penalty'] = (int) input('frequency_penalty', 0);
            $data['presence_penalty'] = (int) input('presence_penalty', 0);
            $data['max_tokens'] = (int) input('max_tokens', 6000);
        }

        $open_ai->chat($data, function ($curl, $data) {
            // Relay the raw SSE chunk to the client immediately.
            echo $data;

            // Emit SSE headers once, with the first chunk, unless the
            // response has already started or a content type was set.
            if (!headers_sent() && !in_array('content-type', array_map('strtolower', headers_list()))) {
                header('Content-Type: text/event-stream');
                header('Connection: keep-alive');
                header('Cache-Control: no-cache');
                header('Access-Control-Allow-Origin: *');
                header('X-Accel-Buffering: no');
            }

            // Accumulate each delta's content so the complete reply can be
            // returned (and later persisted) after the stream finishes.
            $prefix = 'data:';
            if (str_starts_with($data, $prefix)) {
                $events = explode('data: ', $data);
                foreach ($events as $index => $payload) {
                    if ($index === 0) {
                        // Text before the first "data: " marker — not JSON.
                        continue;
                    }
                    $decoded = json_decode($payload, true);
                    if (isset($decoded['choices'][0]['delta']['content'])) {
                        $this->last .= $decoded['choices'][0]['delta']['content'];
                    }
                }
            }

            // BUGFIX: `strpos($data, 'DONE')` is falsy when the needle sits at
            // offset 0; str_contains() is the correct containment test.
            if (str_contains($data, 'DONE')) {
                // TODO: persist $this->last to the message record when the
                // stream ends (previously commented-out Message::save() code).
            }

            ob_flush();
            flush();

            // cURL write callbacks must return the number of bytes consumed.
            return strlen($data);
        });

        return $this->last;
    }


    /**
     * List enabled models that have at least one enabled API key attached.
     *
     * @return array list of usable model records
     */
    public function getModel()
    {
        $models = modelModel::where('status', 1)->select();
        $model_list = [];
        foreach ($models as $model) {
            // Keep only models that some enabled key references in its
            // model_ids CSV column. (int) cast hardens the raw SQL fragment.
            // (Original shadowed the foreach $key variable here — renamed.)
            $matching_key = keyModel::where(['status' => 1])
                ->where('FIND_IN_SET(' . (int) $model->id . ', model_ids)')
                ->find();
            if ($matching_key) {
                $model_list[] = $model;
            }
        }
        return $model_list;
    }

    /**
     * Resolve a model (by id, or the configured default) and pick one random
     * enabled key bound to it.
     *
     * @param int|string $model_id optional model id; the default model is used when empty
     * @return object|null the key record with ->model attached, or null when
     *                     no model or no matching key exists
     */
    public function getModelKey($model_id = '')
    {
        // Specific model when an id is given, otherwise the default model.
        if ($model_id) {
            $model = modelModel::where('id', $model_id)->find();
        } else {
            $model = modelModel::where('default', 1)->find();
        }

        // BUGFIX: guard against a missing model record; the original fataled
        // on `null->id` when no default model was configured.
        if (!$model) {
            return null;
        }

        // Randomly pick one enabled key that references this model.
        $model_key = keyModel::where('status', 1)
            ->where('FIND_IN_SET(' . (int) $model->id . ', model_ids)')
            ->orderRaw('RAND()')
            ->limit(1)
            ->find();

        if ($model_key) {
            // Attach the model name so callers get one self-contained record.
            $model_key->model = $model->model;
        }
        return $model_key;
    }
}