package metadata

import (
	"fmt"
	"path/filepath"

	"mcp-server/internal/types"
)

// VectorMetadataIntegration bridges the metadata manager and the vector
// storage backend so that chunk vectors are stored together with their
// document- and chunk-level metadata.
type VectorMetadataIntegration struct {
	metadataManager MetadataManager
	vectorStorage   VectorStorage // vector storage abstraction (to be wired to the existing vector module)
}

// VectorStorage is the storage interface for vectors plus their attached
// metadata (needs to be integrated with the existing vector module).
type VectorStorage interface {
	// Store persists a vector under id together with its metadata map.
	Store(id string, vector []float64, metadata map[string]interface{}) error
	// Retrieve returns the vector and metadata previously stored under id.
	Retrieve(id string) ([]float64, map[string]interface{}, error)
	// Search returns up to topK results ranked by similarity to vector.
	Search(vector []float64, topK int) ([]SearchResult, error)
	// Delete removes the vector stored under id.
	Delete(id string) error
}

// SearchResult is a single hit returned by VectorStorage.Search.
type SearchResult struct {
	ID       string                 `json:"id"`       // vector identifier as passed to Store
	Score    float64                `json:"score"`    // similarity score assigned by the storage backend
	Vector   []float64              `json:"vector"`   // the stored vector itself
	Metadata map[string]interface{} `json:"metadata"` // metadata map stored alongside the vector
}

// NewVectorMetadataIntegration constructs a VectorMetadataIntegration that
// uses the given metadata manager and vector storage backend.
func NewVectorMetadataIntegration(metadataManager MetadataManager, vectorStorage VectorStorage) *VectorMetadataIntegration {
	vmi := &VectorMetadataIntegration{}
	vmi.metadataManager = metadataManager
	vmi.vectorStorage = vectorStorage
	return vmi
}

// ProcessFileWithVector extracts document metadata for filePath and stores
// each chunk's vector, annotated with merged document- and chunk-level
// metadata, in the vector store.
//
// chunks and vectors are treated as parallel slices; if vectors is shorter
// than chunks, the surplus chunks are skipped rather than treated as an
// error. Errors are wrapped with %w so callers can inspect the cause via
// errors.Is/errors.As.
func (vmi *VectorMetadataIntegration) ProcessFileWithVector(filePath string, chunks []*types.TextChunk, vectors [][]float64) error {
	// Document-level metadata is required to build each chunk's metadata.
	metadata, err := vmi.metadataManager.ProcessFile(filePath)
	if err != nil {
		return fmt.Errorf("提取元数据失败: %w", err)
	}

	// Store one vector per chunk.
	for i, chunk := range chunks {
		if i >= len(vectors) {
			// Fewer vectors than chunks: nothing left to store.
			break
		}

		chunkMetadata := vmi.createChunkMetadata(metadata, chunk)

		// Vector IDs are derived from the file's base name plus the chunk
		// index so all chunks of one document share a common prefix.
		vectorID := fmt.Sprintf("%s_chunk_%d", filepath.Base(filePath), i)
		if err := vmi.vectorStorage.Store(vectorID, vectors[i], chunkMetadata); err != nil {
			return fmt.Errorf("存储向量失败: %w", err)
		}
	}

	return nil
}

// SearchWithMetadata runs a vector similarity search and post-filters the
// hits by metadata, returning up to topK results enriched with the
// reconstructed document-level metadata.
//
// Because filtering happens after the similarity search, the store is asked
// for topK*2 candidates; if the filter rejects too many of them, fewer than
// topK results may be returned.
func (vmi *VectorMetadataIntegration) SearchWithMetadata(queryVector []float64, metadataFilter map[string]interface{}, topK int) ([]EnhancedSearchResult, error) {
	// Over-fetch so that metadata filtering still has enough candidates.
	vectorResults, err := vmi.vectorStorage.Search(queryVector, topK*2)
	if err != nil {
		return nil, fmt.Errorf("向量搜索失败: %w", err)
	}

	var enhancedResults []EnhancedSearchResult
	for _, result := range vectorResults {
		// Skip candidates that do not satisfy the metadata filter.
		if !vmi.matchesMetadataFilter(result.Metadata, metadataFilter) {
			continue
		}

		enhancedResults = append(enhancedResults, EnhancedSearchResult{
			SearchResult:     result,
			DocumentMetadata: vmi.extractDocumentMetadataFromChunk(result.Metadata),
		})

		// Stop as soon as the requested number of results is collected.
		if len(enhancedResults) >= topK {
			break
		}
	}

	return enhancedResults, nil
}

// EnhancedSearchResult is a vector search hit enriched with the document
// metadata reconstructed from the hit's flattened "document_*" keys.
type EnhancedSearchResult struct {
	SearchResult     `json:"search_result"`
	DocumentMetadata *DocumentMetadata `json:"document_metadata"`
}

// UpdateVectorMetadata forces a re-extraction of the document metadata for
// filePath via the metadata manager.
//
// NOTE(review): only the metadata manager is refreshed here; propagating the
// new metadata to already-stored vectors is not implemented yet and depends
// on the vector storage API design.
func (vmi *VectorMetadataIntegration) UpdateVectorMetadata(filePath string) error {
	// true requests a forced refresh of the cached metadata.
	if _, err := vmi.metadataManager.UpdateMetadata(filePath, true); err != nil {
		return fmt.Errorf("更新元数据失败: %w", err)
	}

	// TODO: integrate with the vector storage system to update the metadata
	// attached to this file's stored vectors.
	return nil
}

// GetMetadataStatistics returns aggregate metadata statistics as reported by
// the underlying metadata manager.
func (vmi *VectorMetadataIntegration) GetMetadataStatistics() (map[string]interface{}, error) {
	return vmi.metadataManager.GetStatistics()
}

// Private helper methods

// createChunkMetadata flattens document-level metadata and chunk-level
// fields into a single map suitable for attaching to a stored vector.
func (vmi *VectorMetadataIntegration) createChunkMetadata(docMetadata *DocumentMetadata, chunk *types.TextChunk) map[string]interface{} {
	merged := map[string]interface{}{
		// Document-level fields, prefixed "document_".
		"document_title":    docMetadata.Title,
		"document_author":   docMetadata.Author,
		"document_category": docMetadata.Category,
		"document_language": docMetadata.Language,
		"document_quality":  docMetadata.Quality,
		"document_keywords": docMetadata.Keywords,
		"document_summary":  docMetadata.Summary,

		// Chunk-level fields.
		"chunk_id":         chunk.ID,
		"chunk_index":      chunk.ChunkIndex,
		"start_line":       chunk.StartLine,
		"end_line":         chunk.EndLine,
		"semantic_tag":     chunk.SemanticTag,
		"chunk_created_at": chunk.CreatedAt,
	}

	// Carry over the chunk's own metadata under a "chunk_" prefix so it
	// cannot collide with the keys above.
	for key, value := range chunk.Metadata {
		merged["chunk_"+key] = value
	}

	return merged
}

// matchesMetadataFilter reports whether metadata satisfies every key/value
// pair in filter. A nil or empty filter matches everything.
//
// Matching is plain interface equality (!=); this can be extended to richer
// predicates (ranges, substring match, ...) later. Note that comparing
// interface values holding uncomparable dynamic types (slices, maps) panics
// in Go, so filters should only contain comparable values.
func (vmi *VectorMetadataIntegration) matchesMetadataFilter(metadata map[string]interface{}, filter map[string]interface{}) bool {
	// len(nil map) == 0, so one length check covers both nil and empty
	// (the former `filter == nil || len(filter) == 0` was redundant).
	if len(filter) == 0 {
		return true
	}

	for key, expectedValue := range filter {
		actualValue, exists := metadata[key]
		if !exists || actualValue != expectedValue {
			return false
		}
	}

	return true
}

// extractDocumentMetadataFromChunk rebuilds a DocumentMetadata value from the
// flattened "document_*" keys written by createChunkMetadata. Keys that are
// missing or have an unexpected dynamic type are left at their zero value.
func (vmi *VectorMetadataIntegration) extractDocumentMetadataFromChunk(chunkMetadata map[string]interface{}) *DocumentMetadata {
	metadata := &DocumentMetadata{}

	if title, ok := chunkMetadata["document_title"].(string); ok {
		metadata.Title = title
	}
	if author, ok := chunkMetadata["document_author"].(string); ok {
		metadata.Author = author
	}
	if category, ok := chunkMetadata["document_category"].(string); ok {
		metadata.Category = category
	}
	if language, ok := chunkMetadata["document_language"].(string); ok {
		metadata.Language = language
	}
	if quality, ok := chunkMetadata["document_quality"].(float64); ok {
		metadata.Quality = quality
	}
	// Keywords are stored as []string in-memory, but a JSON round-trip
	// through the vector store turns them into []interface{}; accept both
	// so keywords are not silently dropped after deserialization.
	switch kw := chunkMetadata["document_keywords"].(type) {
	case []string:
		metadata.Keywords = kw
	case []interface{}:
		keywords := make([]string, 0, len(kw))
		for _, k := range kw {
			if s, ok := k.(string); ok {
				keywords = append(keywords, s)
			}
		}
		metadata.Keywords = keywords
	}
	if summary, ok := chunkMetadata["document_summary"].(string); ok {
		metadata.Summary = summary
	}

	return metadata
}

// MetadataEnhancedProcessor combines a document processor (chunking) with
// metadata extraction and vector/metadata integration.
type MetadataEnhancedProcessor struct {
	docProcessor      DocumentProcessor // document processor abstraction (to be wired to the existing document module)
	metadataManager   MetadataManager
	vectorIntegration *VectorMetadataIntegration
}

// DocumentProcessor is the document chunking interface (needs to be
// integrated with the existing document processing module).
type DocumentProcessor interface {
	// ProcessFile splits a single file into chunks.
	ProcessFile(filePath string) (*types.SplitResult, error)
	// ProcessDirectory splits every file in dirPath, optionally recursing.
	ProcessDirectory(dirPath string, recursive bool) ([]*types.SplitResult, error)
}

// NewMetadataEnhancedProcessor constructs a MetadataEnhancedProcessor wiring
// together the document processor, the metadata manager, and a vector
// integration built from the same metadata manager and vector storage.
func NewMetadataEnhancedProcessor(docProcessor DocumentProcessor, metadataManager MetadataManager, vectorStorage VectorStorage) *MetadataEnhancedProcessor {
	integration := NewVectorMetadataIntegration(metadataManager, vectorStorage)
	return &MetadataEnhancedProcessor{
		docProcessor:      docProcessor,
		metadataManager:   metadataManager,
		vectorIntegration: integration,
	}
}

// ProcessFileWithMetadata splits filePath into chunks and extracts its
// document metadata in one call.
//
// If chunking fails, (nil, nil, err) is returned. If only the metadata
// extraction fails, the valid split result is still returned alongside the
// error so callers can decide whether to keep it.
func (mep *MetadataEnhancedProcessor) ProcessFileWithMetadata(filePath string) (*types.SplitResult, *DocumentMetadata, error) {
	// Chunk the document first.
	splitResult, err := mep.docProcessor.ProcessFile(filePath)
	if err != nil {
		return nil, nil, fmt.Errorf("文档处理失败: %w", err)
	}

	// Extract document-level metadata.
	metadata, err := mep.metadataManager.ProcessFile(filePath)
	if err != nil {
		return splitResult, nil, fmt.Errorf("元数据提取失败: %w", err)
	}

	// Fold the metadata-extraction time into the document's processing time.
	// NOTE(review): assumes ProcessTime is in milliseconds and ExtractedAt
	// is not before ProcessedAt — confirm against the types package.
	if splitResult.Document != nil {
		splitResult.Document.ProcessTime += metadata.ExtractedAt.UnixMilli() - splitResult.Document.ProcessedAt.UnixMilli()
	}

	return splitResult, metadata, nil
}

// ProcessDirectoryWithMetadata splits every file under dirPath (recursing
// when recursive is true) and extracts metadata for the same directory.
//
// If chunking fails, (nil, nil, err) is returned. If only the metadata
// extraction fails, the valid split results are still returned alongside the
// error so callers can decide whether to keep them.
func (mep *MetadataEnhancedProcessor) ProcessDirectoryWithMetadata(dirPath string, recursive bool) ([]*types.SplitResult, map[string]*DocumentMetadata, error) {
	// Chunk all documents in the directory.
	splitResults, err := mep.docProcessor.ProcessDirectory(dirPath, recursive)
	if err != nil {
		return nil, nil, fmt.Errorf("文档处理失败: %w", err)
	}

	// Extract metadata for the same set of files, keyed by path.
	metadataResults, err := mep.metadataManager.ProcessDirectory(dirPath, recursive)
	if err != nil {
		return splitResults, nil, fmt.Errorf("元数据提取失败: %w", err)
	}

	return splitResults, metadataResults, nil
}
