package com.company.loganalyzer.tools.impl;

import com.company.loganalyzer.config.ElasticsearchConfig;
import com.company.loganalyzer.dto.ToolInput;
import com.company.loganalyzer.dto.ToolResult;
import com.company.loganalyzer.model.LogEntry;
import com.company.loganalyzer.tools.LogAnalysisTool;
import com.company.loganalyzer.utils.TimeRangeUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.index.query.QueryBuilders.*;

/**
 * 日志搜索工具
 * 
 * 对应Python版本的search_logs函数
 * 支持关键词搜索、时间范围、服务名、日志级别等多维度过滤
 */
@Component
@Slf4j
@RequiredArgsConstructor
public class LogSearchTool implements LogAnalysisTool {

    /** Log levels that count as errors when computing the error rate. */
    private static final Set<String> ERROR_LEVELS = Set.of("ERROR", "FATAL", "CRITICAL");

    /** Hard cap on the page size to protect the cluster from oversized result windows. */
    private static final int MAX_PAGE_SIZE = 1000;

    /** Number of full log entries returned verbatim in the "logs" field of the result. */
    private static final int DETAIL_LOG_LIMIT = 20;

    private final ElasticsearchOperations elasticsearchOperations;
    private final ElasticsearchConfig elasticsearchConfig;
    // Injected but not used by the visible code; kept to preserve the
    // @RequiredArgsConstructor-generated constructor signature.
    private final ObjectMapper objectMapper;

    @Override
    public String getName() {
        return "search_logs";
    }

    @Override
    public String getDescription() {
        return "搜索日志记录，支持关键词、时间范围、服务名、日志级别等多维度过滤";
    }

    @Override
    public String[] getSupportedParameters() {
        return new String[]{"query", "timeRange", "serviceName", "level", "traceId", "userId", "limit", "page", "pageSize"};
    }

    /**
     * The tool can run as long as at least one filter criterion is present.
     * userId is included for consistency with getSupportedParameters() and
     * buildSearchQuery(), both of which support it.
     */
    @Override
    public boolean canHandle(ToolInput input) {
        return input.getQuery() != null ||
               input.getTimeRange() != null ||
               input.getServiceName() != null ||
               input.getLevel() != null ||
               input.getTraceId() != null ||
               input.getUserId() != null;
    }

    @Override
    public int getPriority() {
        return 10; // high priority
    }

    /**
     * Executes the log search asynchronously.
     *
     * The underlying Elasticsearch client call is blocking, so the work is
     * shifted onto the bounded-elastic scheduler instead of blocking the
     * subscriber's (possibly event-loop) thread.
     *
     * @param input search criteria (keyword, time range, filters, paging)
     * @return a Mono that always completes with a success or error
     *         {@link ToolResult}; exceptions are captured, never propagated
     */
    @Override
    public Mono<ToolResult> execute(ToolInput input) {
        return Mono.fromCallable(() -> doSearch(input))
            .subscribeOn(Schedulers.boundedElastic());
    }

    /** Runs the blocking search and packages the outcome as a ToolResult. */
    private ToolResult doSearch(ToolInput input) {
        long startTime = System.currentTimeMillis();

        try {
            NativeSearchQuery searchQuery = buildSearchQuery(input);

            // Resolve the concrete (typically date-suffixed) index names
            // covered by the requested time range.
            List<String> indexNames = TimeRangeUtils.generateIndexNames(
                elasticsearchConfig.getIndexPattern(),
                input.getTimeRange()
            );

            SearchHits<LogEntry> searchHits = elasticsearchOperations.search(
                searchQuery,
                LogEntry.class,
                indexNames.toArray(new String[0])
            );

            List<LogEntry> logs = searchHits.getSearchHits().stream()
                .map(SearchHit::getContent)
                .collect(Collectors.toList());

            Map<String, Object> analysis = analyzeLogs(logs);

            Map<String, Object> result = new HashMap<>();
            result.put("query", input.getQuery());
            result.put("timeRange", input.getTimeRange());
            result.put("totalCount", searchHits.getTotalHits());
            result.put("returnedCount", logs.size());
            result.put("analysis", analysis);
            // Only the first few entries are returned in full to keep the payload small.
            result.put("logs", logs.stream().limit(DETAIL_LOG_LIMIT).collect(Collectors.toList()));
            result.put("indexNames", indexNames);

            long executionTime = System.currentTimeMillis() - startTime;

            log.info("日志搜索完成 - 查询: {}, 时间范围: {}, 结果数: {}, 耗时: {}ms",
                input.getQuery(), input.getTimeRange(), logs.size(), executionTime);

            ToolResult toolResult = ToolResult.success(getName(), result, "搜索完成");
            toolResult.setExecutionTime(executionTime);
            return toolResult;

        } catch (Exception e) {
            // Map failures to an error ToolResult instead of propagating,
            // so the surrounding Mono always emits a value.
            log.error("日志搜索失败", e);
            return ToolResult.error(getName(), "搜索失败: " + e.getMessage(), e);
        }
    }

    /**
     * Builds the Elasticsearch query from the tool input.
     *
     * Keyword input is matched against message/exception/serviceName via
     * scoring should-clauses (minimum_should_match = 1); every other
     * criterion is added as a non-scoring filter clause.
     */
    private NativeSearchQuery buildSearchQuery(ToolInput input) {
        NativeSearchQueryBuilder queryBuilder = new NativeSearchQueryBuilder();

        var boolQuery = boolQuery();

        // Keyword search: boost direct message hits over exception text, with a
        // multi-field match as fallback. NOTE(review): query_string parses user
        // input as query syntax, so malformed input surfaces as a search
        // failure; consider simpleQueryStringQuery if input is untrusted.
        if (input.getQuery() != null && !input.getQuery().trim().isEmpty()) {
            boolQuery.should(queryStringQuery(input.getQuery()).field("message").boost(2.0f));
            boolQuery.should(queryStringQuery(input.getQuery()).field("exception").boost(1.5f));
            boolQuery.should(multiMatchQuery(input.getQuery(), "message", "exception", "serviceName"));
            boolQuery.minimumShouldMatch(1);
        }

        // Exact-match filters (non-scoring, cacheable).
        if (input.getServiceName() != null && !input.getServiceName().trim().isEmpty()) {
            boolQuery.filter(termQuery("serviceName", input.getServiceName()));
        }

        if (input.getLevel() != null && !input.getLevel().trim().isEmpty()) {
            // Levels are compared upper-case; use a fixed locale so the result
            // does not depend on the JVM default (e.g. Turkish dotless-i).
            boolQuery.filter(termQuery("level", input.getLevel().toUpperCase(Locale.ROOT)));
        }

        if (input.getTraceId() != null && !input.getTraceId().trim().isEmpty()) {
            boolQuery.filter(termQuery("traceId", input.getTraceId()));
        }

        if (input.getUserId() != null && !input.getUserId().trim().isEmpty()) {
            boolQuery.filter(termQuery("userId", input.getUserId()));
        }

        // Inclusive [start, end] time-range filter.
        // NOTE(review): LocalDateTime is passed to the range query as-is and is
        // serialized via its toString(); confirm this matches the index's
        // timestamp field mapping/format.
        if (input.getTimeRange() != null) {
            LocalDateTime[] timeRange = TimeRangeUtils.parseTimeRange(input.getTimeRange());
            boolQuery.filter(rangeQuery("timestamp")
                .gte(timeRange[0])
                .lte(timeRange[1]));
        }

        queryBuilder.withQuery(boolQuery);

        // Newest entries first.
        queryBuilder.withSort(Sort.by(Sort.Direction.DESC, "timestamp"));

        // Paging: pageSize > limit > configured default, capped at MAX_PAGE_SIZE.
        int page = input.getPage() != null ? input.getPage() : 0;
        int pageSize = input.getPageSize() != null ? input.getPageSize()
                     : (input.getLimit() != null ? input.getLimit() : elasticsearchConfig.getPageSize());

        Pageable pageable = PageRequest.of(page, Math.min(pageSize, MAX_PAGE_SIZE));
        queryBuilder.withPageable(pageable);

        return queryBuilder.build();
    }

    /**
     * Aggregates basic statistics over the returned page of logs: level and
     * service distributions, error count/rate, and the covered time span.
     *
     * @param logs search results, sorted newest-first (see buildSearchQuery)
     * @return analysis map; contains only a "message" entry when logs is empty
     */
    private Map<String, Object> analyzeLogs(List<LogEntry> logs) {
        Map<String, Object> analysis = new HashMap<>();

        if (logs.isEmpty()) {
            analysis.put("message", "未找到匹配的日志记录");
            return analysis;
        }

        // Lambda parameter is named "entry" (not "log") to avoid shadowing the
        // @Slf4j logger field.
        Map<String, Long> levelStats = logs.stream()
            .collect(Collectors.groupingBy(
                entry -> entry.getLevel() != null ? entry.getLevel() : "UNKNOWN",
                Collectors.counting()
            ));

        Map<String, Long> serviceStats = logs.stream()
            .collect(Collectors.groupingBy(
                entry -> entry.getServiceName() != null ? entry.getServiceName() : "UNKNOWN",
                Collectors.counting()
            ));

        long errorCount = logs.stream()
            .filter(entry -> entry.getLevel() != null && ERROR_LEVELS.contains(entry.getLevel()))
            .count();

        // logs is non-empty here, so the division is safe.
        double errorRate = (double) errorCount / logs.size() * 100;

        // Results are sorted timestamp-desc: last element is oldest, first is
        // newest. Entries with a missing timestamp are skipped (the original
        // would throw an NPE here).
        Map<String, String> timeRange = new HashMap<>();
        var oldest = logs.get(logs.size() - 1).getTimestamp();
        var newest = logs.get(0).getTimestamp();
        if (oldest != null) {
            timeRange.put("start", oldest.toString());
        }
        if (newest != null) {
            timeRange.put("end", newest.toString());
        }

        analysis.put("totalLogs", logs.size());
        analysis.put("errorCount", errorCount);
        // Round to two decimal places.
        analysis.put("errorRate", Math.round(errorRate * 100.0) / 100.0);
        analysis.put("levelDistribution", levelStats);
        analysis.put("serviceDistribution", serviceStats);
        analysis.put("timeRange", timeRange);

        return analysis;
    }
}