package com.alinesno.infra.base.search.gateway.provider;

import cn.hutool.core.io.FileTypeUtil;
import cn.hutool.core.util.IdUtil;
import com.alibaba.fastjson.JSONObject;
import com.alinesno.infra.base.search.api.DataProcessingDto;
import com.alinesno.infra.base.search.api.DatasetSearchDto;
import com.alinesno.infra.base.search.entity.DatasetClientEntity;
import com.alinesno.infra.base.search.entity.DatasetKnowledgeEntity;
import com.alinesno.infra.base.search.entity.VectorDatasetEntity;
import com.alinesno.infra.base.search.interceptor.DatasetApiKeyRequired;
import com.alinesno.infra.base.search.service.IDatasetClientService;
import com.alinesno.infra.base.search.service.IDatasetKnowledgeService;
import com.alinesno.infra.base.search.service.IVectorDatasetService;
import com.alinesno.infra.common.facade.datascope.PermissionQuery;
import com.alinesno.infra.common.facade.response.AjaxResult;
import com.alinesno.infra.smart.assistant.adapter.dto.DocumentVectorBean;
import com.alinesno.infra.smart.assistant.adapter.dto.VectorSearchDto;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import io.jsonwebtoken.lang.Assert;
import jakarta.validation.Valid;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.util.List;
import java.util.Objects;

/**
 * 数据集接口
 */
@Slf4j
@Valid
@RestController
@RequestMapping("/api/base/dataset/document")
/**
 * Dataset document API controller.
 *
 * Exposes upload / parse / search endpoints for external clients that
 * authenticate via the {@code Dataset-API-Key} request header (enforced by
 * {@link DatasetApiKeyRequired} on the protected endpoints).
 */
public class DatasetApiController {

    @Autowired
    private IDatasetKnowledgeService datasetKnowledgeService ;

    @Autowired
    private IVectorDatasetService vectorDatasetService ;

    @Autowired
    private IDatasetClientService datasetClientService ;

    // Local scratch directory for uploaded files; falls back to java.io.tmpdir.
    @Value("${alinesno.file.local.path:${java.io.tmpdir}}")
    private String localPath  ;

    @Autowired
    @Qualifier("chatThreadPool") // configured thread pool for async parsing work
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;

    /**
     * Uploads a document into the given dataset.
     *
     * The file is written to a local temp file, registered via
     * {@code saveDatasetTmpFile}, and the temp file is removed afterwards
     * (even if registration fails).
     *
     * @param apiKey    client API key from the {@code Dataset-API-Key} header
     * @param file      the multipart document to import
     * @param datasetId target dataset id; must reference an existing dataset
     * @return success result, or throws if validation fails
     * @throws Exception on I/O failure while persisting the upload
     */
    @DatasetApiKeyRequired
    @PostMapping(value = "/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    public AjaxResult upload(
            @RequestHeader("Dataset-API-Key") String apiKey,
            @RequestPart("file") MultipartFile file,
            @RequestParam Long datasetId) throws Exception {

        DatasetClientEntity client = datasetClientService.getOne(
                new LambdaQueryWrapper<DatasetClientEntity>()
                .eq(DatasetClientEntity::getApiKey, apiKey)
        );

        Assert.notNull(client, "无效的API密钥");

        // Verify the target dataset exists.
        VectorDatasetEntity vectorDataset = vectorDatasetService.getById(datasetId) ;
        Assert.notNull(vectorDataset , "数据集不存在");

        PermissionQuery query = getPermissionQuery(client);

        String fileName = Objects.requireNonNull(file.getOriginalFilename());

        // Duplicate file names are not allowed within the same dataset.
        LambdaQueryWrapper<DatasetKnowledgeEntity> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(DatasetKnowledgeEntity::getDatasetId, datasetId) ;
        wrapper.eq(DatasetKnowledgeEntity::getDocumentName , fileName);
        Assert.isTrue(datasetKnowledgeService.count(wrapper) == 0 , "文件["+fileName+"]已存在,请删除原文件重新导入.");

        // Generate a unique local file name, guarding against names with no
        // extension (lastIndexOf('.') == -1 would otherwise yield the whole name).
        int dotIndex = fileName.lastIndexOf('.');
        String fileSuffix = dotIndex >= 0 ? fileName.substring(dotIndex + 1) : "";
        String newFileName = IdUtil.getSnowflakeNextId() + (fileSuffix.isEmpty() ? "" : "." + fileSuffix);

        File targetFile = new File(localPath , newFileName);
        try {
            // Copy the upload to local disk for processing.
            FileUtils.writeByteArrayToFile(targetFile, file.getBytes());

            String fileType = FileTypeUtil.getType(targetFile);

            datasetKnowledgeService.saveDatasetTmpFile(datasetId, fileName, targetFile , fileType, fileSuffix , query) ;
        } finally {
            // BUG FIX: forceDeleteOnExit only removed the file at JVM shutdown,
            // leaking temp files for the life of the process. Delete immediately,
            // and also on the error path.
            FileUtils.deleteQuietly(targetFile);
        }

        return AjaxResult.success("上传成功") ;
    }

    /**
     * Builds a {@link PermissionQuery} scoped to the authenticated client's
     * org / department / operator.
     */
    @NotNull
    private static PermissionQuery getPermissionQuery(DatasetClientEntity client) {
        PermissionQuery query = new PermissionQuery();
        query.setOrgId(client.getOrgId());
        query.setDepartmentId(client.getDepartmentId());
        query.setOperatorId(client.getOperatorId());
        return query;
    }

    /**
     * Triggers asynchronous parsing of a dataset's documents into the vector store.
     *
     * The actual work runs on {@code chatThreadPool}; the success response only
     * means the task was accepted, not that parsing has completed.
     *
     * @param apiKey      client API key from the {@code Dataset-API-Key} header
     * @param datasetId   dataset whose documents should be vectorized
     * @param checkLength ideal chunk length for segmentation; must be >= 512
     * @return success if the task was queued, error if checkLength is too small
     */
    @DatasetApiKeyRequired
    @GetMapping("/parse")
    public AjaxResult parseVector(
            @RequestHeader("Dataset-API-Key") String apiKey,
            @RequestParam Long datasetId ,
            @RequestParam int checkLength) {

        if(checkLength < 512){
            return AjaxResult.error("checkLength长度不能小于512");
        }

        log.info("解析文档向量：{}", datasetId);

        DataProcessingDto dto = new DataProcessingDto();
        dto.setDatasetId(datasetId);
        dto.setProcessingMethod("direct_segmentation");
        dto.setIdealChunkLength(String.valueOf(checkLength));
        dto.setCustomSplitSymbol("\\n;======;==SPLIT==");
        dto.setProcessingParam("自动");

        // Run the (potentially long) vectorization off the request thread.
        threadPoolTaskExecutor.execute(() -> {
            try {
                datasetKnowledgeService.dataUploadToVectorDataset(dto) ;
            } catch (Exception e) {
                log.error("文件上传解析失败", e);
            }
        });

        return AjaxResult.success("解析成功") ;
    }

    /**
     * Searches across one or more vector datasets.
     *
     * Dataset ids supplied by the caller are first validated against the
     * database; only ids that actually exist are searched.
     *
     * @param apiKey    client API key from the {@code Dataset-API-Key} header
     * @param searchDto query text, topK, and the dataset ids to search
     * @return matching document vectors, or an error if no valid dataset id was given
     */
    @DatasetApiKeyRequired
    @PostMapping("/search")
    public AjaxResult searchVector(
            @RequestHeader("Dataset-API-Key") String apiKey,
            @RequestBody DatasetSearchDto searchDto) {

        log.info("搜索向量：{}", searchDto.getQuery());

        // A query may span multiple knowledge datasets.
        List<Long> datasetIdArr = searchDto.getDatasetIds() ;

        // BUG FIX: guard null/empty before wrapper.in(...) — MyBatis-Plus
        // generates invalid SQL for an empty IN collection.
        if(datasetIdArr == null || datasetIdArr.isEmpty()){
            return AjaxResult.error("数据集不存在，而且为空.");
        }

        // Keep only the ids that reference datasets that really exist.
        LambdaQueryWrapper<VectorDatasetEntity> wrapper = new LambdaQueryWrapper<>();
        wrapper.in(VectorDatasetEntity::getId, datasetIdArr);

        List<VectorDatasetEntity> vectorDatasetList = vectorDatasetService.list(wrapper) ;

        List<Long> realDatasetIdArr = vectorDatasetList.stream().map(VectorDatasetEntity::getId).toList();

        if(realDatasetIdArr.isEmpty()){
            return AjaxResult.error("数据集不存在，而且为空.");
        }

        VectorSearchDto dto = new VectorSearchDto() ;
        dto.setSearchText(searchDto.getQuery()) ;
        dto.setTopK(searchDto.getTopK()) ;

        List<DocumentVectorBean> queryResult = vectorDatasetService.searchMultiDataset(dto ,realDatasetIdArr);
        return AjaxResult.success("搜索成功", JSONObject.toJSON(queryResult));
    }

    /**
     * Example of an endpoint that skips API-key validation (no
     * {@link DatasetApiKeyRequired} annotation).
     */
    @GetMapping("/info")
    public AjaxResult publicInfo() {
        return AjaxResult.success("公共信息");
    }
}