
/**
 * @file    ai_conn.c
 * @author  Azolla (1228449928@qq.com)
 * @brief   讯飞AI大模型鉴权
 * @version 0.1
 * @date    2024-10-14
 * https://www.xfyun.cn/doc/spark/general_url_authentication.html#_1-2-%E9%89%B4%E6%9D%83%E5%8F%82%E6%95%B0
 * @copyright Copyright (c) 2024
 *               ┏┓      ┏┓
            ┏┛┻━━━┛┻┓
            ┃      ☃      ┃
            ┃  ┳┛  ┗┳  ┃
            ┃      ┻      ┃
            ┗━┓      ┏━┛
                ┃      ┗━━━┓
                ┃  神兽保佑    ┣┓
                ┃　永无BUG！   ┏┛
                ┗┓┓┏━┳┓┏┛
                  ┃┫┫  ┃┫┫
                  ┗┻┛  ┗┻┛
 * */
#include "esp_hmac.h"
#include "esp32s3/rom/efuse.h"
#include "mbedtls/base64.h"
#include "cJSON.h"
#include "esp_log.h"

#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/queue.h" 
#include "freertos/semphr.h" 
#include "freertos/event_groups.h"

#include "wifi_sntp.h"
#include "websocket.h"

#include "audio_i2s.h"
#include "audio_spiffs.h"
#include "audio_player.h"
#include "audio_tts.h"

#include "sparkai.h"

#define TAG     "sparkai"
#ifdef TAG
#define LOGI(format, ...)    ESP_LOGI(TAG, format, ##__VA_ARGS__)
#define LOGW(format, ...)    ESP_LOGW(TAG, format, ##__VA_ARGS__)
#define LOGE(format, ...)    ESP_LOGE(TAG, format, ##__VA_ARGS__)
#else
#define LOGI(format, ...)
#define LOGW(format, ...)
#define LOGE(format, ...)
#endif

//===================================================================================================================
//===================================================================================================================
#if 0 
请求地址: https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
Tips: 星火大模型API当前有Lite、Pro、Pro-128K、Max、Max-32K和4.0 Ultra六个版本，各版本独立计量tokens。
传输协议 ：ws(s),为提高安全性，强烈推荐wss
Spark4.0 Ultra 请求地址，对应的domain参数为4.0Ultra：
    wss://spark-api.xf-yun.com/v4.0/chat
Spark Max-32K请求地址，对应的domain参数为max-32k
    wss://spark-api.xf-yun.com/chat/max-32k
Spark Max请求地址，对应的domain参数为generalv3.5
    wss://spark-api.xf-yun.com/v3.5/chat
Spark Pro-128K请求地址，对应的domain参数为pro-128k：
    wss://spark-api.xf-yun.com/chat/pro-128k
Spark Pro请求地址，对应的domain参数为generalv3：
    wss://spark-api.xf-yun.com/v3.1/chat
Spark Lite请求地址，对应的domain参数为lite：
    wss://spark-api.xf-yun.com/v1.1/chat
#endif
 
#define SPARKAI_HOST        "spark-api.xf-yun.com"   // 星火大模型API
// 星火认知大模型Spark Lite的URL值（/v1.1/chat 对应 Lite 版本），其他版本大模型URL值请前往文档（https://www.xfyun.cn/doc/spark/Web.html）查看
#define SPARKAI_VER         "/v1.1/chat"
#define SPARKAI_URL         "ws://"SPARKAI_HOST""SPARKAI_VER""
// 星火认知大模型调用秘钥信息，请前往讯飞开放平台控制台（https://console.xfyun.cn/services/bm35）查看
#define SPARKAI_APP_ID      "e1081f9d"
#define SPARKAI_API_SECRET  "ZDlmNzQ2NzdjYzZkMjY3NGNjNzAzYzJj"
#define SPARKAI_API_KEY     "337318a0efabfdba18e971767f7c32d7"
// 星火认知大模型Spark Lite的domain值（"lite"），其他版本大模型domain值请前往文档（https://www.xfyun.cn/doc/spark/Web.html）查看
#define SPARKAI_DOMAIN      "lite"
 
typedef struct {
    const char *api_secret;
    const char *api_key;
    const char *host;
    const char *ver;
    const char *url;
} sparkai_auth_t;

// WebSocket协议通用鉴权URL生成说明: https://www.xfyun.cn/doc/spark/general_url_authentication.html
/**
 * @brief Build the signed websocket handshake URL for an iFlytek (Xunfei) API.
 *
 * Implements the "general URL authentication" scheme:
 * https://www.xfyun.cn/doc/spark/general_url_authentication.html
 * Steps: build an RFC1123 GMT date, sign "host/date/request-line" with
 * HMAC-SHA256 (hardware HMAC peripheral), base64 the digest, wrap it in an
 * authorization header, base64 that, then assemble the URL-encoded query.
 *
 * @param auth  credentials and endpoint (api_secret/api_key/host/ver/url)
 * @param url   output buffer for the final handshake URL; caller must supply
 *              enough space (callers pass 360 bytes) — not bounds-checked here
 */
void sparkai_authentication(sparkai_auth_t auth, char *url)
{
    // 1.2.1 Build the date parameter, e.g. "Fri, 05 May 2023 10:43:39 GMT".
    char strtime[32];
    struct tm cur_time;
    sntp_strtime(strtime, &cur_time);  // NOTE: must be UTC, not local (+8h) time
    LOGI("sparkai time = %s", strtime);  // asctime() form: "Thu Jan  1 00:00:00 1970"
    char week[4] = { '\0' }, month[4] = { '\0' };
    // Pull the 3-letter weekday/month abbreviations out of the asctime string.
    strncpy(week,  &strtime[0], 3);
    strncpy(month, &strtime[4], 3);
    strftime(strtime, sizeof(strtime), "%Y %H:%M:%S", &cur_time);
    char date[64] = { '\0' };
    sprintf(date, "%s, %02d %s %s GMT", week, cur_time.tm_mday, month, strtime);
    //LOGI("date = %s", date);  // Thu, 04 Jan 1970 00:00:00 GMT
    
    // 1.2.2 Build the authorization parameter.
    // 1) api_key / api_secret come from the Xunfei console.
    // 2) Concatenate the canonical string to sign (host + date + request line).
    char tmp[128] = { '\0' }; // "host: spark-api.xf-yun.com\ndate: Fri, 05 May 2023 10:43:39 GMT\nGET /v1.1/chat HTTP/1.1";
    sprintf(tmp, "host: %s\ndate: %s\nGET %s HTTP/1.1", auth.host, date, auth.ver);
    //LOGI("tmp = %s", tmp);

    // ESP32 hardware HMAC-SHA256: https://docs.espressif.com/projects/esp-idf/zh_CN/v4.3.2/esp32c3/api-reference/peripherals/hmac.html
    // Online HMAC-SHA256 tool for cross-checking: https://www.jyshare.com/crypto/hmacsha256/
    // 3) Sign tmp with HMAC-SHA256 keyed by api_secret -> tmp_sha digest.
    // NOTE(review): ets_efuse_write_key programs a one-time-programmable eFuse
    // block; calling this on every authentication looks suspect — verify the
    // ROM function tolerates re-writes of the same key, or burn the key once
    // at provisioning time instead.
    ets_efuse_write_key(ETS_EFUSE_BLOCK_KEY3, ETS_EFUSE_KEY_PURPOSE_HMAC_UP, auth.api_secret, strlen(auth.api_secret));
    uint8_t tmp_sha[32] = { '\0' };
    esp_hmac_calculate(HMAC_KEY3, tmp, strlen(tmp), tmp_sha);
    //esp_log_buffer_hex("hmac", tmp_sha, 32);

    // 4) Base64-encode the digest to produce the signature (44 chars + NUL).
    size_t dlen;
    unsigned char signature[48] = { '\0' };
    mbedtls_base64_encode( signature, sizeof( signature ), &dlen, tmp_sha, sizeof(tmp_sha) );
    //LOGI("signature = %s", signature);

    // 5) Assemble authorization_origin from api_key + signature.
    char authorization_origin[240] = { '\0' };
    sprintf(authorization_origin, "api_key=\"%s\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"%s\"", auth.api_key, signature);
    // 6) Base64-encode authorization_origin to get the final authorization.
    unsigned char authorization[240] = { '\0' };
    mbedtls_base64_encode( authorization, sizeof( authorization ), &dlen, (const uint8_t *)authorization_origin, strlen(authorization_origin) );
    //LOGI("authorization = %s", authorization);

    // 1.2.3 Final URL: combine the query parameters, URL-encoding the date
    // (", " -> "%2C+", ":" -> "%3A", spaces -> "+").
    // date = Fri, 05 May 2023 10:43:39 GMT ==>> Fri%2C+05+May+2023+10%3A43%3A39
    sprintf(date, "%s%%2C+%02d+%s+%d+%02d%%3A%02d%%3A%02d+GMT", week, cur_time.tm_mday, month, 1900 + cur_time.tm_year, cur_time.tm_hour, cur_time.tm_min, cur_time.tm_sec);
    sprintf(url, "%s?authorization=%s&date=%s&host=%s", auth.url, authorization, date, auth.host);
    //LOGI("url = %s", url);
}

void sparkai_create_url(char *url)
{
    sparkai_auth_t auth = {
        .api_secret = SPARKAI_API_SECRET,
        .api_key = SPARKAI_API_KEY,
        .host = SPARKAI_HOST,
        .ver = SPARKAI_VER,
        .url = SPARKAI_URL,
    };
    sparkai_authentication(auth, url);
}
  

//  请求接口说明: https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
/**
 * @brief Serialize a single-turn chat request for the Spark LLM and send it
 *        over the already-connected websocket.
 *
 * Request format: https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
 *
 * @param content  UTF-8 user utterance placed in payload.message.text[0]
 */
static void sparkai_chat_message(const char *content)
{
    cJSON *root = cJSON_CreateObject();
    if (root == NULL) {  // BUGFIX: root was dereferenced without a NULL check
        LOGE("sparkai_chat_message: cJSON_CreateObject failed");
        return;
    }

    /* header: application id + user id */
    cJSON *header = cJSON_CreateObject();
    cJSON_AddStringToObject(header, "app_id", SPARKAI_APP_ID);
    cJSON_AddStringToObject(header, "uid", "1");
    cJSON_AddItemToObject(root, "header", header);

    /* parameter.chat: model domain and sampling limits */
    cJSON *parameter = cJSON_CreateObject();
    cJSON *chat = cJSON_CreateObject();
    cJSON_AddStringToObject(chat, "domain", SPARKAI_DOMAIN);
    cJSON_AddNumberToObject(chat, "temperature", SPARKAI_THRESHOLD);
    cJSON_AddNumberToObject(chat, "max_tokens", SPARKAI_MAX_TOKENS);
    cJSON_AddItemToObject(parameter, "chat", chat);
    cJSON_AddItemToObject(root, "parameter", parameter);

    /* payload.message.text: one user message */
    cJSON *payload = cJSON_CreateObject();
    cJSON *message = cJSON_CreateObject();
    cJSON *text    = cJSON_CreateArray();
    cJSON *tokens  = cJSON_CreateObject();
    cJSON_AddStringToObject(tokens, "role", "user");
    cJSON_AddStringToObject(tokens, "content", content);
    cJSON_AddItemToArray(text, tokens);
    cJSON_AddItemToObject(message, "text", text);
    cJSON_AddItemToObject(payload, "message", message);
    cJSON_AddItemToObject(root, "payload", payload);

    char *data = cJSON_PrintUnformatted(root);
    if (data != NULL) {  // BUGFIX: PrintUnformatted returns NULL on OOM
        uint16_t len = strlen(data);
        websocket_client_send_text(data, len, portMAX_DELAY);
        //LOGI("sparkai_message = %s | %d", data, len);
        cJSON_free(data);
    } else {
        LOGE("sparkai_chat_message: cJSON_PrintUnformatted failed");
    }
    cJSON_Delete(root);
}


/**
 * @brief Parse one chat-response frame from the Spark LLM.
 *
 * Extracts payload.choices.{status,seq} and the first text chunk into accept.
 *
 * @param value   JSON frame received from the websocket
 * @param accept  output: status, seq and (truncated, NUL-terminated) text
 * @return true when a text chunk was extracted, false otherwise
 */
static bool sparkai_analysis(const char *value, sparkai_accept_t *accept)
{
    bool ret = false;
    cJSON *root = cJSON_Parse(value);
    if (root == NULL) return ret;
    cJSON *payload = cJSON_GetObjectItem(root,    "payload");
    if (payload == NULL) goto sparkai_exit;
    cJSON *choices = cJSON_GetObjectItem(payload, "choices");
    if (choices == NULL)  goto sparkai_exit;
    cJSON *status  = cJSON_GetObjectItem(choices, "status");
    if (status != NULL) {
        accept->status = status->valueint;
    }
    cJSON *seq = cJSON_GetObjectItem(choices, "seq");
    if (seq != NULL) {
        accept->seq = seq->valueint;
    }
    cJSON *text = cJSON_GetObjectItem(choices, "text");
    if (text != NULL) {
        cJSON *item = cJSON_GetArrayItem(text, 0);
        if (item != NULL) {
            cJSON *content = cJSON_GetObjectItem(item, "content");
            // BUGFIX: content / valuestring were dereferenced unchecked.
            if (content != NULL && content->valuestring != NULL) {
                uint16_t len = strlen(content->valuestring);
                // BUGFIX: clamp to size-1 so the terminator always fits;
                // the old '>' comparison could leave accept->text unterminated.
                if (len >= sizeof(accept->text)) {
                    LOGW("content->valuestring len = %d", len);
                    len = sizeof(accept->text) - 1;
                }
                memcpy(accept->text, content->valuestring, len);
                accept->text[len] = '\0';  // strncpy gave no such guarantee
                ret = true;
            }
        }
    }
sparkai_exit:
    /* Remember to free memory */
    cJSON_Delete(root);
    return ret;
}
 
//===================================================================================================================
//===================================================================================================================
// 大模型中文语音识别 API 文档: https://www.xfyun.cn/doc/spark/spark_zh_iat.html#%E4%B8%80%E3%80%81%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
#define AUDIOAI_HOST        "iat.xf-yun.com"     // 语音合成大模型
#define AUDIOAI_VER         "/v1"
#define AUDIOAI_URL         "ws://"AUDIOAI_HOST""AUDIOAI_VER""
#define AUDIOAI_DOMAIN      "slm"
#define AUDIOAI_LANGUAGE    "zh_cn"
#define AUDIOAI_SAMPLE_RATE  16000    // 音频属性: 采样率16k或8K、位长16bit、单声道
 
// 大模型中文语音识别 接口鉴权：https://www.xfyun.cn/doc/spark/spark_zh_iat.html#%E5%9B%9B%E3%80%81%E6%8E%A5%E5%8F%A3%E9%89%B4%E6%9D%83
void audioai_create_url(char *url)
{
    sparkai_auth_t auth = {
        .api_secret = SPARKAI_API_SECRET,
        .api_key = SPARKAI_API_KEY,
        .host = AUDIOAI_HOST,
        .ver = AUDIOAI_VER,
        .url = AUDIOAI_URL,
    };
    sparkai_authentication(auth, url);
}
 

// 中文识别请求数据格式 https://www.xfyun.cn/doc/spark/spark_zh_iat.html#%E4%BA%94%E3%80%81%E6%95%B0%E6%8D%AE%E4%BC%A0%E8%BE%93%E6%8E%A5%E6%94%B6%E4%B8%8E%E8%AF%B7%E6%B1%82%E3%80%81%E8%BF%94%E5%9B%9E%E7%A4%BA%E4%BE%8B
/**
 * @brief Serialize one PCM chunk as a speech-recognition (IAT) frame and send
 *        it over the websocket.
 *
 * Frame format: https://www.xfyun.cn/doc/spark/spark_zh_iat.html#%E4%BA%94%E3%80%81%E6%95%B0%E6%8D%AE%E4%BC%A0%E8%BE%93%E6%8E%A5%E6%94%B6%E4%B8%8E%E8%AF%B7%E6%B1%82%E3%80%81%E8%BF%94%E5%9B%9E%E7%A4%BA%E4%BE%8B
 *
 * @param msg  pcm buffer + size, frame seq and status
 *             (status 0 = first frame: also carries the parameter block)
 */
static void audioai_iat_message(audioai_request_t msg)
{
    // Create root first: bailing out here allocates nothing else.
    // BUGFIX: the old code allocated audio_pcm before this check and leaked
    // it on the early return.
    cJSON *root = cJSON_CreateObject();
    if (root == NULL) return;

    // payload.audio.audio: base64 of the raw PCM (total audio <= 60 s).
    size_t dlen;
    char *audio_pcm = (char *)ai_memory_malloc(SPARKAI_MAX_TOKENS * 2);
    assert(audio_pcm);
    // char *audio_pcm = (char *)heap_caps_malloc(SPARKAI_MAX_TOKENS * 2, MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT);
    mbedtls_base64_encode( (unsigned char *)audio_pcm, SPARKAI_MAX_TOKENS * 2, &dlen, (const uint8_t *)msg.pcm, msg.size );

    cJSON *header = cJSON_CreateObject();
    cJSON_AddStringToObject(header, "app_id", SPARKAI_APP_ID);
    cJSON_AddNumberToObject(header, "status", msg.status);
    cJSON_AddItemToObject(root, "header", header);

    if (msg.status == 0) {  // first frame carries the recognition parameters
        cJSON *parameter = cJSON_CreateObject();
        cJSON *iat = cJSON_CreateObject();
        cJSON_AddStringToObject(iat, "domain", AUDIOAI_DOMAIN);
        cJSON_AddStringToObject(iat, "language", AUDIOAI_LANGUAGE);
        cJSON_AddStringToObject(iat, "accent", "mandarin");
        cJSON_AddNumberToObject(iat, "eos",  2000); // silence (ms) that ends recognition
        cJSON_AddNumberToObject(iat, "vinfo",  1);
        cJSON_AddStringToObject(iat, "dwa", "wpgs");  // dynamic-correction streaming results
        cJSON *result = cJSON_CreateObject();
        cJSON_AddStringToObject(result, "encoding", "utf8");
        cJSON_AddStringToObject(result, "compress", "raw");
        cJSON_AddStringToObject(result, "format", "json");
        cJSON_AddItemToObject(iat, "result", result);
        cJSON_AddItemToObject(parameter, "iat", iat);
        cJSON_AddItemToObject(root, "parameter", parameter);
    }

    /* payload.audio: 16 kHz / 16-bit / mono raw PCM, base64-encoded */
    cJSON *payload = cJSON_CreateObject();
    cJSON *audio = cJSON_CreateObject();
    cJSON_AddStringToObject(audio, "encoding", "raw");
    cJSON_AddNumberToObject(audio, "sample_rate",  AUDIOAI_SAMPLE_RATE);
    cJSON_AddNumberToObject(audio, "channels",  1);
    cJSON_AddNumberToObject(audio, "bit_depth", 16);
    cJSON_AddNumberToObject(audio, "seq",     msg.seq);
    cJSON_AddNumberToObject(audio, "status",  msg.status);
    cJSON_AddStringToObject(audio, "audio",   audio_pcm);
    cJSON_AddItemToObject(payload, "audio", audio);
    cJSON_AddItemToObject(root, "payload", payload);

    char *data = cJSON_PrintUnformatted(root);
    if (data != NULL) {  // BUGFIX: guard against OOM from PrintUnformatted
        uint16_t len = strlen(data);
        websocket_client_send_text(data, len, portMAX_DELAY);
        //LOGI("audioai_iat_message = %s | %d", data, len);
        cJSON_free(data);
    }
    cJSON_Delete(root);

    ai_memory_free(audio_pcm);
    audio_pcm = NULL;
}

// {"sn":1,"ls":true,"bg":0,"ed":0,"pgs":"apd","rst":"rlt","ws":[{"bg":0,"cw":[{"sc":0.00,"w":"你好"}]}]}
/**
 * @brief Extract recognized words from a decoded IAT result object and append
 *        them to tts_text.
 *
 * Input shape (after base64 decode), e.g.:
 * {"sn":1,"ls":true,"bg":0,"ed":0,"pgs":"apd","rst":"rlt","ws":[{"bg":0,"cw":[{"sc":0.00,"w":"你好"}]}]}
 *
 * @param value     decoded JSON text
 * @param tts_text  output accumulator; may be NULL to only test for content.
 *                  NOTE(review): appended with strcat — caller must guarantee
 *                  capacity; verify against CONFIG_AITEXT_SIZE at call sites.
 * @return true when at least one word was found
 */
static bool audioai_analysis_tts(const char *value, char *tts_text)
{
    bool ret = false;
    cJSON *root = cJSON_Parse(value);
    if (root == NULL) return ret;
    cJSON *wsObj = cJSON_GetObjectItem(root, "ws");
    if (wsObj != NULL) {
        uint8_t arrSize = cJSON_GetArraySize(wsObj);
        for (uint8_t i = 0; i < arrSize; i++) {
            cJSON *wsItem = cJSON_GetArrayItem(wsObj, i);
            cJSON *cwObj  = cJSON_GetObjectItem(wsItem, "cw");
            cJSON *cwItem = cJSON_GetArrayItem(cwObj, 0);
            cJSON *wObj   = cJSON_GetObjectItem(cwItem, "w");
            // BUGFIX: wObj (and its valuestring) were dereferenced unchecked;
            // a malformed frame would crash here.
            if (wObj == NULL || wObj->valuestring == NULL) continue;
            if (tts_text) strcat(tts_text, wObj->valuestring);
            ret = true;
        }
    }
    cJSON_Delete(root);
    return ret;
}

/**
 * @brief Parse one speech-recognition (IAT) response frame.
 *
 * Reads header.status, payload.result.seq, then base64-decodes
 * payload.result.text and extracts the recognized words into accept->text.
 *
 * @param value   JSON frame received from the websocket
 * @param accept  output: status, seq, decoded text
 * @return true when recognized text was extracted
 */
static bool audioai_analysis(const char *value, audioai_accept_t *accept)
{
    bool ret = false;
    cJSON *root = cJSON_Parse(value);
    if (root == NULL) return false;
    cJSON *header = cJSON_GetObjectItem(root,   "header");
    if (header == NULL) goto audioai_exit;
    cJSON *status = cJSON_GetObjectItem(header, "status");
    if (status != NULL) {
        accept->status = status->valueint;
    }

    cJSON *payload = cJSON_GetObjectItem(root,   "payload");
    if (payload == NULL) goto audioai_exit;
    cJSON *result  = cJSON_GetObjectItem(payload, "result");
    if (result == NULL) goto audioai_exit;  // BUGFIX: result was used unchecked
    cJSON *seq = cJSON_GetObjectItem(result, "seq");
    if (seq != NULL) {
        accept->seq = seq->valueint;
    }

    cJSON *text = cJSON_GetObjectItem(result, "text");
    if (text != NULL && text->valuestring != NULL) {  // BUGFIX: NULL valuestring guard
        size_t  dlen = 0;
        uint8_t *decode = (uint8_t *)ai_memory_calloc(2048, sizeof(char));
        assert(decode);
        // BUGFIX: decode into 2047 bytes so the calloc'd terminator survives
        // even a maximum-length decode, and skip parsing on decode failure.
        if (mbedtls_base64_decode( (unsigned char *)decode, 2047, &dlen,
                                   (const uint8_t *)text->valuestring,
                                   strlen(text->valuestring) ) == 0) {
            ret = audioai_analysis_tts((const char *)decode, accept->text);
        } else {
            LOGW("audioai_analysis: base64 decode failed");
        }
        ai_memory_free(decode);
        decode = NULL;
    }
audioai_exit:
    /* Remember to free memory */
    cJSON_Delete(root);
    return ret;
}


//===================================================================================================================
//===================================================================================================================
// Which backend the open websocket currently talks to (chat vs. speech);
// read by websocket_data_callback to route incoming frames.
static sparkai_model_t sparkai_model;

/**
 * @brief (Re)connect the websocket to the endpoint for the given model and
 *        wait up to ~200 ticks * 10 for the connection to come up.
 *
 * @param model  SPARKAI_MODEL (chat) or AUDIOAI_MODEL (speech recognition)
 * @return true when the socket is connected on return, false on bad model,
 *         allocation failure or connect timeout
 */
static bool sparkai_start(sparkai_model_t model)
{
    char *url = ai_memory_calloc(360, sizeof(char));
    if (url == NULL) return false;  // BUGFIX: allocation was never checked

    if (model == SPARKAI_MODEL) {
        sparkai_create_url(url);
    } else if (model == AUDIOAI_MODEL) {
        audioai_create_url(url);
    } else {
        ai_memory_free(url);  // BUGFIX: url leaked on the unknown-model path
        return false;
    }

    websocket_client_stop();
    websocket_client_set_url(url);
    websocket_client_start();

    for (uint16_t conn_timeout = 0; conn_timeout < 200; conn_timeout++) {
        if (websocket_client_is_connected()) break;  // wait for the link...
        else vTaskDelay(10);
    }
    ai_memory_free(url);
    url = NULL;
    // BUGFIX: report the actual connection state instead of always true.
    return websocket_client_is_connected();
}

#define CONFIG_AITEXT_SIZE     2048
static char *aitext = NULL;
static audioai_request_t req = { 0 };
static const uint8_t AUDIOAI_SEND_EVENT = BIT0;
static const uint8_t AUDIOAI_WAIT_EVENT = BIT1;
static const uint8_t AUDIOAI_OVER_EVENT = BIT2;
static const uint8_t SPARKAI_SEND_EVENT = BIT4;
static EventGroupHandle_t xEvent = NULL;

/**
 * @brief Send a request to the selected AI backend, (re)connecting first if
 *        the websocket is down.
 *
 * @param msg    SPARKAI_MODEL: const char* UTF-8 prompt;
 *               AUDIOAI_MODEL: audioai_request_t* PCM frame descriptor
 * @param model  which backend to talk to
 * @return 1 when the request was sent, 0 when not connected or msg is NULL
 */
int sparkai_request(void *msg, sparkai_model_t model)
{
    if (msg == NULL) return 0;  // robustness: both branches dereference msg

    if (!websocket_client_is_connected()) {
        LOGI("sparkai_start...");
        sparkai_start(model);  // establish the server connection
        LOGI("sparkai_start OK...");
    }

    if (websocket_client_is_connected()) {
        if (model == SPARKAI_MODEL) {
            const char *request = (const char *)msg;
            sparkai_chat_message(request);
            // Clear the accumulator for the incoming streamed answer.
            // BUGFIX: guard against use before sparkai_init() allocated it.
            if (aitext != NULL) memset(aitext, 0, CONFIG_AITEXT_SIZE);
        } else if (model == AUDIOAI_MODEL) {
            audioai_request_t *request = (audioai_request_t *)msg;
            audioai_iat_message(*request);
        }
        sparkai_model = model;  // route subsequent frames in the data callback
        return 1;
    }
    return 0;
}

//===================================================================================================================
//===================================================================================================================
/**
 * @brief Websocket RX callback: route incoming frames to the parser for the
 *        currently active model and accumulate streamed text into aitext.
 *
 * Chat model: append each chunk; on the final chunk, truncate to 256 bytes
 * and hand the text to the TTS player.
 * Speech model: keep the latest partial result; on the final chunk, signal
 * the handler task to forward the recognized text to the chat model.
 */
static void websocket_data_callback(wss_data_t data)
{
    bool ret = false;
    sparkai_accept_t accept = { 0 };
    if (sparkai_model == SPARKAI_MODEL) {
        ret = sparkai_analysis((const char *)data.value, &accept);
        if (ret == false) return;
        // BUGFIX: bound the append by current length PLUS chunk length — the
        // old check (strlen(aitext) < CONFIG_AITEXT_SIZE) could still overflow.
        if (strlen(aitext) + strlen(accept.text) < CONFIG_AITEXT_SIZE) strcat(aitext, accept.text);
        if (accept.status == SPARKAI_STATUE_OVER) {
            LOGI("sparkAi: %s", aitext);
            memset(&aitext[256], 0, CONFIG_AITEXT_SIZE - 256);  // cap TTS input at 256 bytes
            audio_tts_player(aitext);
        }
    } else if (sparkai_model == AUDIOAI_MODEL) {
        // NOTE(review): accept is sparkai_accept_t but audioai_analysis takes
        // audioai_accept_t* — this only works if the two structs share layout;
        // confirm in sparkai.h.
        ret = audioai_analysis((const char *)data.value, &accept);
        if (ret == false) return;
        if (accept.status == SPARKAI_STATUE_OVER) {
            if (strlen(aitext) + strlen(accept.text) < CONFIG_AITEXT_SIZE) strcat(aitext, accept.text);
            LOGI("aitext_end: %s", aitext);
            xEventGroupSetBits(xEvent, AUDIOAI_OVER_EVENT | AUDIOAI_WAIT_EVENT | SPARKAI_SEND_EVENT);
        } else if (accept.seq >= 1) {
            // Partial result replaces the previous one ("wpgs" streaming mode).
            if (strlen(accept.text) < CONFIG_AITEXT_SIZE) strcpy(aitext, accept.text);
            LOGI("aitext: %s", aitext);
        }
    }
    // ESP_LOGI(TAG, "wss_data = %s", data.data_ptr);
    // ESP_LOGI(TAG, "accept: %d %d %s", accept.status, accept.seq, accept.text);
}



#if 1
 
//if (req.pcm == NULL) req.pcm = ai_memory_malloc(SPARKAI_MAX_TOKENS);
void audioai_req_handler(bool action, void *pcm, uint16_t size)
{
    if (aitext == NULL || xEventGroupWaitBits(xEvent, AUDIOAI_WAIT_EVENT, pdFALSE, pdFALSE, 0)) return; 
    //LOGI("size = %d", size);
    
    memcpy(&req.pcm[req.size], pcm, size);
    req.size += size;
    if (req.size < SPARKAI_MAX_TOKENS) return;
    req.size = 0;
 
    if (action) {  // 按下开始识别
        if (req.seq == 0) {
            req.status = SPARKAI_STATUE_BEGIN;
            req.seq = 1;
            LOGI("audioai_start...");
            memset(aitext, 0, CONFIG_AITEXT_SIZE);
        } else if (req.seq) {
            req.status = SPARKAI_STATUE_KEEP;
            req.seq += 1;
            if (!websocket_client_is_connected()) { // 过程中断开连接了，中断此次对话
                req.seq = 0;
                xEventGroupSetBits(xEvent, AUDIOAI_OVER_EVENT);
                audio_tts_player("网络异常");
            }
        }
    } else {
        if (req.seq) {
            req.seq = 0;  // 这里如果识别完成后，不主动发送 SPARKAI_STATUE_OVER 了！
        }
    }
    
    if (req.seq > 0) {
        xEventGroupSetBits(xEvent, AUDIOAI_SEND_EVENT | AUDIOAI_WAIT_EVENT);
        req.size = 0;
    }
}

/**
 * @brief Block (up to 8000 ticks) until the speech-recognition session
 *        signals completion; the OVER bit is consumed on success.
 * @return true when AUDIOAI_OVER_EVENT fired, false on timeout or before init
 */
bool audioai_speech_over(void)
{
    if (xEvent == NULL) return false;
    const EventBits_t bits = xEventGroupWaitBits(xEvent, AUDIOAI_OVER_EVENT, pdTRUE, pdFALSE, 8000);
    LOGI("uxBits = 0x%x", bits);
    return (bits & AUDIOAI_OVER_EVENT) != 0;
}
 
/**
 * @brief Worker task: forwards accumulated PCM chunks to the speech
 *        recognizer, and forwards finished recognition text to the chat model.
 *
 * @param arg  unused FreeRTOS task argument
 */
void sparkai_handler_task(void *arg)
{
    while (1) {
        EventBits_t uxBits = xEventGroupWaitBits(xEvent, AUDIOAI_SEND_EVENT | SPARKAI_SEND_EVENT, pdTRUE, pdFALSE, portMAX_DELAY);
        if (uxBits & AUDIOAI_SEND_EVENT) {
            //LOGI("req.seq = %d", req.seq);
            req.size = SPARKAI_MAX_TOKENS;  // full chunk ready in req.pcm
            sparkai_request(&req, AUDIOAI_MODEL);  // send for speech recognition
        } else if (uxBits & SPARKAI_SEND_EVENT) {
            uint16_t len = strlen(aitext);
            if (len >= 2) {
                // BUGFIX: the old code `continue`d on short text, skipping the
                // cleanup below and leaving AUDIOAI_WAIT_EVENT set forever —
                // audioai_req_handler would then drop all future audio.
                //websocket_client_send_text("close", 5, portMAX_DELAY);  // (alt.) send bad data so the server drops us
                websocket_client_close(5000);  // close actively; the 15 s idle timeout is too slow
                LOGI("sparkai.aitext: %s | %d", aitext, len);
                sparkai_request((char *)aitext, SPARKAI_MODEL);
            }
        }
        req.size = 0;
        xEventGroupClearBits(xEvent, AUDIOAI_WAIT_EVENT);  // allow new audio in
    }
}

#else  // 通过按键识别
#include "hal_gpio.h"
#include "hal_adc.h"
 
#define CONFIG_AUDIO_KEY      GPIO_NUM_0

void audioai_handler(void)
{
    static uint8_t keytime = 0;
    static audioai_request_t req = { 0 };
    if (!gpio_get_level(CONFIG_AUDIO_KEY)) {  // 按下开始识别
        if (keytime < 5) {  // 消抖
            keytime++;
            return;
        }
        if (req.seq == 0) {
            req.status = SPARKAI_STATUE_BEGIN;
            req.seq = 1;
            LOGI("audioai_start...");
            adc_read_start();
            memset(aitext, 0, CONFIG_AITEXT_SIZE);
        } else if (req.seq) {
            req.status = SPARKAI_STATUE_KEEP;
            req.seq += 1;
        }
    } else {
        keytime = 0;
        if (req.seq) {
            if (req.status != SPARKAI_STATUE_OVER) {
                req.status = SPARKAI_STATUE_OVER;
                req.seq += 1;
            } else {
                req.seq = 0;
                adc_read_stop();
            }
        } else if (audio2ai_is_ok) {
            audio2ai_is_ok = 0;
            //websocket_client_send_text("close", 5, portMAX_DELAY);  // 这里故意发错数据，让服务器主动断开链接！
            websocket_client_close(5000);  // 主动断开连接，不然等待超时断开连接15秒太久了.
            LOGI("sparkai.aitext: %s", aitext);
            vTaskDelay(1000);
            sparkai_request((char *)aitext, SPARKAI_MODEL);
        }
        if (req.seq == 0) {
            static uint16_t logTime = 0;
            if (++logTime >= 250) {
                logTime = 0;
                LOGI("请按住BOOT键讲话...");
            }
            return;
        } 
    } 

    adc_val_t val;
    if (adc_read_buff(&val, 100)) {
        req.pcm  = val.buff;
        req.size = val.size;
        sparkai_request(&req, AUDIOAI_MODEL);  // 发送语音识别
    }
}

/**
 * @brief Polling task for the key-driven build (disabled #else branch):
 *        configures the BOOT key as a pulled-up input and polls the
 *        push-to-talk handler every 40 ms.
 * @param arg  unused FreeRTOS task argument
 */
void sparkai_handler_task(void *arg)
{
    // config gpio input mode.
    gpio_set_direction(CONFIG_AUDIO_KEY, GPIO_MODE_INPUT);
    gpio_set_pull_mode(CONFIG_AUDIO_KEY, GPIO_PULLUP_ONLY);
    gpio_pullup_en(CONFIG_AUDIO_KEY);
    while (1) {
        vTaskDelay(40);  // recommended cadence: send uncompressed PCM every 40 ms, 1280 B per frame
        audioai_handler();
    }
}
#endif

/**
 * @brief One-time module init: bring up the websocket client, allocate the
 *        PCM and text buffers, create the event group and start the worker
 *        task on the APP core.
 */
void sparkai_init(void)
{
    websocket_init();
    websocket_register_callback(websocket_data_callback);
    req.pcm = ai_memory_malloc(SPARKAI_MAX_TOKENS);
    assert(req.pcm);   // BUGFIX: fail fast on OOM instead of crashing later
    // BUGFIX: aitext was malloc'd uninitialized, yet the data callback calls
    // strlen()/strcat() on it — use calloc so it starts as an empty string.
    aitext = ai_memory_calloc(CONFIG_AITEXT_SIZE, sizeof(char));
    assert(aitext);
    xEvent = xEventGroupCreate();
    assert(xEvent);
    xTaskCreatePinnedToCore(sparkai_handler_task, "sparkai_handler_task", 8 * 1024, NULL, 12, NULL, APP_CPU_NUM);
}


#if 0
// 算法验证！ 也可通过 python spark_ws.py 进行算法验证！
// https://www.xfyun.cn/doc/spark/general_url_authentication.html#_1-2-%E9%89%B4%E6%9D%83%E5%8F%82%E6%95%B0
/**
 * @brief Offline verification of the URL-authentication pipeline (disabled
 *        #if 0 block). Uses the fixed key/date/signature from the Xunfei
 *        docs so the output can be compared against spark_ws.py.
 */
void conn_auth_test(void)
{
    // 1.2.2 Authorization parameter generation:
    // 1) api_key / api_secret come from the console.
    // 2) Concatenate the canonical string to sign from the date (example values below).

    // Hardware HMAC-SHA256: https://docs.espressif.com/projects/esp-idf/zh_CN/v4.3.2/esp32c3/api-reference/peripherals/hmac.html
    // 3) Sign tmp with HMAC-SHA256 keyed by the API secret -> tmp_sha digest.
    const uint8_t key_data[32] = "MjlmNzkzNmZkMDQ2OTc0ZDdmNGE2ZTZi";
    int ets_status = ets_efuse_write_key(ETS_EFUSE_BLOCK_KEY4, ETS_EFUSE_KEY_PURPOSE_HMAC_UP, key_data, sizeof(key_data));
    if (ets_status != ESP_OK) {
       // return;
    }
    const char *tmp = "host: spark-api.xf-yun.com\ndate: Fri, 05 May 2023 10:43:39 GMT\nGET /v1.1/chat HTTP/1.1";
    uint8_t tmp_sha[32];
    esp_err_t result = esp_hmac_calculate(HMAC_KEY4, tmp, strlen(tmp), tmp_sha);
    esp_log_buffer_hex("hmac", tmp_sha, 32);
    if (result != ESP_OK) {
        return;
    }
 
    // 4) Base64-encode the digest to produce the signature.
    size_t dlen;
    unsigned char signature[48] = { '\0' };
    if ( mbedtls_base64_encode( signature, sizeof( signature ), &dlen, tmp_sha, sizeof(tmp_sha) ) != 0 ) {  // ERROR
        return;
    }
    LOGI("signature = %s", signature);

    // 5) Assemble authorization_origin (fixed reference values from the docs).
    char *authorization_origin = "api_key=\"addd2272b6d8b7c8abdd79531420ca3b\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"z5gHdu3pxVV4ADMyk467wOWDQ9q6BQzR3nfMTjc/DaQ=\"";
    // 6) Base64-encode authorization_origin to get the final authorization.
    unsigned char authorization[225] = { '\0' };
    if ( mbedtls_base64_encode( authorization, sizeof( authorization ), &dlen, (const uint8_t *)authorization_origin, strlen(authorization_origin) ) != 0 ) {  // ERROR
        return;
    }
    LOGI("authorization = %s", authorization);
    vTaskDelay(1000);
}
#endif

 


