package com.spzx.report.aiUtils;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;

public class PineconeUploadUtil {
    /** Shared JSON mapper — ObjectMapper is thread-safe and expensive to build, so cache it. */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /** Number of vectors sent per Pinecone upsert request. */
    private static final int BATCH_SIZE = 50;

    /**
     * Pre-processes rows queried from the relational database and uploads them
     * to Pinecone in batches of {@link #BATCH_SIZE}.
     *
     * <p>Each row is embedded via {@code SpzxEmbeddingTools.getEmbedding}; rows whose
     * embedding comes back empty are skipped and counted as failures.
     *
     * @param list             rows from the relational database; each row is expected to
     *                         contain {@code "key_column"} and {@code "key_word"} entries
     *                         (missing entries are stored as empty strings)
     * @param pineconeKey      Pinecone API key used for the {@code Api-Key} header
     * @param pineconeIndexKey Pinecone index upsert URL
     */
    public static void uploadListToPinecone(List<Map<String, Object>> list,
                                            String pineconeKey, String pineconeIndexKey) {
        List<Map<String, Object>> batch = new ArrayList<>();
        int lineNumber = 0;
        int successCount = 0;
        int failedCount = 0;

        for (Map<String, Object> map : list) {
            String cleanText = map.toString().trim();

            // Embed the row text; an empty vector signals an embedding failure.
            float[] vector = SpzxEmbeddingTools.getEmbedding(cleanText).vector();
            if (vector == null || vector.length == 0) {
                System.out.println("向量化失败,跳过[" + lineNumber + "]行:" + cleanText);
                failedCount++;
                // FIX: advance the row index on failure too, so logged indices and
                // generated ids keep tracking the actual source rows.
                lineNumber++;
                continue;
            }

            // One Pinecone vector entry: stable id, embedding values, and metadata.
            Map<String, Object> vectorEntry = new HashMap<>();
            vectorEntry.put("id", "num_" + lineNumber);
            vectorEntry.put("values", vector);

            // FIX: Objects.toString guards against absent keys (previously an NPE
            // if "key_column"/"key_word" was missing from a row).
            Map<String, String> metadata = new HashMap<>();
            metadata.put("column", Objects.toString(map.get("key_column"), "").trim());
            metadata.put("word", Objects.toString(map.get("key_word"), "").trim());

            vectorEntry.put("metadata", metadata);
            batch.add(vectorEntry);
            lineNumber++;

            // Flush a full batch.
            if (batch.size() >= BATCH_SIZE) {
                uploadToPinecone(batch, pineconeKey, pineconeIndexKey);
                successCount += batch.size();
                batch.clear();
            }
        }

        // Flush the trailing partial batch.
        if (!batch.isEmpty()) {
            uploadToPinecone(batch, pineconeKey, pineconeIndexKey);
            successCount += batch.size();
        }

        // FIX: the counters were computed but never reported.
        System.out.println("Pinecone upload finished: success=" + successCount
                + ", failed=" + failedCount);
    }

    /**
     * Sends one batch of vectors to the Pinecone upsert endpoint as a JSON body
     * under the fixed namespace {@code "Spzx-aiReport"}.
     *
     * <p>Network/serialization failures are logged (not rethrown) to preserve the
     * original best-effort contract of the caller.
     *
     * @param vectors          batch of vector entries ({@code id}, {@code values}, {@code metadata})
     * @param pineconeKey      Pinecone API key
     * @param pineconeIndexKey Pinecone index upsert URL
     */
    private static void uploadToPinecone(List<Map<String, Object>> vectors, String pineconeKey, String pineconeIndexKey) {
        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
            HttpPost httpPost = new HttpPost(pineconeIndexKey);
            httpPost.setHeader("Content-Type", "application/json");
            httpPost.setHeader("Api-Key", pineconeKey);

            Map<String, Object> requestBody = new HashMap<>();
            requestBody.put("vectors", vectors);
            requestBody.put("namespace", "Spzx-aiReport");
            String jsonBody = OBJECT_MAPPER.writeValueAsString(requestBody);

            // Explicit UTF-8 so multi-byte metadata survives serialization.
            httpPost.setEntity(new StringEntity(jsonBody, StandardCharsets.UTF_8));

            try (CloseableHttpResponse response = httpClient.execute(httpPost)) {
                int statusCode = response.getStatusLine().getStatusCode();
                // FIX: read the body with an explicit charset and check the status
                // code — previously non-2xx responses were silently treated as success.
                String responseString = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
                if (statusCode < 200 || statusCode >= 300) {
                    System.err.println("Pinecone upsert failed, HTTP " + statusCode
                            + ": " + responseString);
                }
            }
        } catch (Exception e) {
            // FIX: add context instead of a bare stack trace; kept non-throwing so the
            // caller's best-effort batching behavior is unchanged.
            System.err.println("Pinecone upsert request failed: " + e.getMessage());
            e.printStackTrace();
        }
    }
}
