<?php

namespace App\Common\Third\ChatGPT;

use GuzzleHttp\Client;
use Hyperf\Context\ApplicationContext;
use Hyperf\Contract\ConfigInterface;
use Orhanerday\OpenAi\OpenAi;

class OpenChatAi
{
    /**
     * Chat-AI settings loaded from the `chat_ai` config group
     * (api key, proxy url, model, messages, sampling parameters).
     *
     * @var array
     */
    private array $chatConfig;

    public function __construct()
    {
        $container = ApplicationContext::getContainer();
        // Default to [] so a missing `chat_ai` config yields an array, not null.
        $this->chatConfig = $container->get(ConfigInterface::class)->get('chat_ai', []);
    }

    /**
     * Describe: Stream a chat completion through the openai-php client and
     * forward each chunk to the HTTP client as a server-sent event stream.
     *
     * @param array $params Optional; `role_content` is the user message (defaults to '你好').
     * @return void
     * @throws \Psr\Container\ContainerExceptionInterface
     * @throws \Psr\Container\NotFoundExceptionInterface
     * Created by crh at 2023/07/01 22:35
     */
    public function chatStream(array $params = []): void
    {
        // NOTE(review): verify=false disables TLS verification and the proxy is
        // for local debugging — confirm both are acceptable outside development.
        $client = \OpenAI::factory()
            ->withApiKey($this->chatConfig['open_ai_api_key'])
            ->withHttpClient(new Client([
                'verify' => false,
                'proxy'  => $this->chatConfig['proxy_url'],
            ]))
            ->make();

        $requestId = uniqid('chat_') . time();

        $requestParams = $this->buildRequestParams(trim($params['role_content'] ?? '你好'));

        writeLog('openai/chat', ['request_id' => $requestId, 'request_params' => $requestParams]);

        $streamResponse = $client->chat()->createStreamed($requestParams);

        $response = ApplicationContext::getContainer()
            ->get(\Hyperf\HttpServer\Contract\ResponseInterface::class);

        $eventStream = new \Hyperf\Engine\Http\EventStream($response->getConnection());

        foreach ($streamResponse as $stream) {
            writeLog('openai/chat', ['request_id' => $requestId, 'stream' => $stream]);
            // Push the first choice of each chunk, keeping CJK text readable.
            $eventStream->write(json_encode($stream->choices[0], JSON_UNESCAPED_UNICODE) . PHP_EOL);
        }

        $eventStream->end();
    }

    /**
     * Stream a chat completion through orhanerday/open-ai's raw curl callback
     * and relay each raw SSE payload straight to the HTTP client.
     *
     * @param array $params `role_content` is the user message (defaults to '你好').
     * @return void
     * @throws \Psr\Container\ContainerExceptionInterface
     * @throws \Psr\Container\NotFoundExceptionInterface
     */
    public function chatWithStream(array $params): void
    {
        $requestId = uniqid('chat_') . time();

        $openAi = new OpenAi($this->chatConfig['open_ai_api_key']);

        // Proxy for local debugging; skipped when not configured.
        $proxyUrl = $this->chatConfig['proxy_url'] ?? '';
        if (!empty($proxyUrl)) {
            $openAi->setProxy($proxyUrl);
        }

        $requestParams = $this->buildRequestParams($params['role_content'] ?? '你好');

        writeLog('openai/chat2', ['request_id' => $requestId, 'request_params' => $requestParams]);

        $response = ApplicationContext::getContainer()
            ->get(\Hyperf\HttpServer\Contract\ResponseInterface::class);

        $eventStream = new \Hyperf\Engine\Http\EventStream($response->getConnection());

        $openAi->chat($requestParams, function ($curlInfo, $data) use ($requestId, $eventStream): int {
            writeLog('openai/chat2', ['request_id' => $requestId, 'data' => $data]);
            $eventStream->write($data);

            // curl write callbacks must return the number of bytes consumed,
            // otherwise the transfer is aborted.
            return strlen($data);
        });

        $eventStream->end();
    }

    /**
     * Build the chat-completions request payload shared by both streaming methods.
     *
     * BUG FIX: `temperature`, `frequency_penalty` and `presence_penalty` were
     * previously cast to (int), truncating fractional config values (e.g. a
     * temperature of 0.7 became 0). The OpenAI API defines them as floats, so
     * they are cast to (float) here; `max_tokens` remains an integer.
     *
     * @param string $roleContent The user message content.
     * @return array
     */
    private function buildRequestParams(string $roleContent): array
    {
        return [
            'model'    => $this->chatConfig['model'],
            'messages' => [
                [
                    'role'    => 'system',
                    'content' => $this->chatConfig['messages']['role_system_content'],
                ],
                [
                    'role'    => 'user',
                    'content' => $roleContent,
                ],
            ],
            'temperature'       => (float)$this->chatConfig['temperature'],
            'max_tokens'        => (int)$this->chatConfig['max_tokens'],
            'frequency_penalty' => (float)$this->chatConfig['frequency_penalty'],
            'presence_penalty'  => (float)$this->chatConfig['presence_penalty'],
            'stream'            => true,
        ];
    }
}