package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"path/filepath"
	"strings"
	"time"

	"mcp-server/internal/qdrant"
	"mcp-server/internal/storage"
	"mcp-server/internal/types"
	"mcp-server/internal/vector"
)

// SimpleKnowledgeVectorizer 简化的知识库向量化工具
type SimpleKnowledgeVectorizer struct {
	vectorService  vector.VectorService
	storageManager *storage.StorageManager
	ctx            context.Context
}

// NewSimpleKnowledgeVectorizer 创建简化的知识库向量化工具
func NewSimpleKnowledgeVectorizer() (*SimpleKnowledgeVectorizer, error) {
	ctx := context.Background()

	// 初始化向量服务
	vectorService := vector.NewVectorService()
	serviceConfig := vector.DefaultServiceConfig()
	if err := vectorService.Initialize(serviceConfig); err != nil {
		return nil, fmt.Errorf("初始化向量服务失败: %w", err)
	}

	// 初始化存储管理器
	storageConfig := &storage.StorageConfig{
		QdrantConfig: &qdrant.Config{
			DataPath: "./knowledge_vectors",
			Collection: qdrant.CollectionConfigYAML{
				Name:       "knowledge_base",
				VectorSize: 768,
				Distance:   "Cosine",
			},
		},
		BatchSize:   100,
		CacheSize:   1000,
		EnableCache: true,
	}
	storageManager := storage.NewStorageManager()
	if err := storageManager.Initialize(storageConfig); err != nil {
		return nil, fmt.Errorf("初始化存储管理器失败: %w", err)
	}

	return &SimpleKnowledgeVectorizer{
		vectorService:  vectorService,
		storageManager: storageManager,
		ctx:            ctx,
	}, nil
}

// ProcessKnowledgeBase 处理知识库
func (skv *SimpleKnowledgeVectorizer) ProcessKnowledgeBase(knowledgeBasePath string) error {
	log.Printf("开始处理知识库: %s", knowledgeBasePath)

	var totalFiles, processedFiles int
	var totalChunks, totalVectors int

	// 遍历知识库目录
	err := filepath.Walk(knowledgeBasePath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		// 只处理markdown文件
		if !info.IsDir() && strings.HasSuffix(strings.ToLower(path), ".md") {
			totalFiles++
			log.Printf("处理文件: %s", path)

			chunks, vectors, err := skv.processFile(path)
			if err != nil {
				log.Printf("处理文件失败 %s: %v", path, err)
				return err
			}

			processedFiles++
			totalChunks += chunks
			totalVectors += vectors

			log.Printf("文件 %s 处理完成: %d 个分片, %d 个向量",
				filepath.Base(path), chunks, vectors)
		}

		return nil
	})

	if err != nil {
		return fmt.Errorf("遍历知识库失败: %w", err)
	}

	log.Printf("知识库处理完成: 总文件 %d, 处理成功 %d, 总分片 %d, 总向量 %d",
		totalFiles, processedFiles, totalChunks, totalVectors)
	return nil
}

// processFile 处理单个文件
func (skv *SimpleKnowledgeVectorizer) processFile(filePath string) (int, int, error) {
	// 读取文件内容
	content, err := os.ReadFile(filePath)
	if err != nil {
		return 0, 0, fmt.Errorf("读取文件失败: %w", err)
	}

	// 简单文本分片
	chunks := skv.simpleTextSplit(string(content), filePath)
	if len(chunks) == 0 {
		return 0, 0, nil
	}

	// 向量化
	result, err := skv.vectorService.VectorizeChunks(chunks)
	if err != nil {
		return len(chunks), 0, fmt.Errorf("向量化失败: %w", err)
	}

	// 存储向量
	vectorCount, err := skv.storeVectors(result, filePath)
	if err != nil {
		return len(chunks), 0, fmt.Errorf("存储向量失败: %w", err)
	}

	return len(chunks), vectorCount, nil
}

// simpleTextSplit 简单文本分片
func (skv *SimpleKnowledgeVectorizer) simpleTextSplit(content, filePath string) []*types.TextChunk {
	// 按段落分割
	paragraphs := strings.Split(content, "\n\n")
	var chunks []*types.TextChunk

	chunkIndex := 0
	for _, paragraph := range paragraphs {
		paragraph = strings.TrimSpace(paragraph)
		if len(paragraph) < 10 { // 忽略太短的段落
			continue
		}

		// 如果段落太长，进一步分割
		if len(paragraph) > 800 {
			subChunks := skv.splitLongParagraph(paragraph, filePath, &chunkIndex)
			chunks = append(chunks, subChunks...)
		} else {
			chunk := &types.TextChunk{
				ID:         fmt.Sprintf("%s_chunk_%d", generateDocID(filePath), chunkIndex),
				Content:    paragraph,
				ChunkIndex: chunkIndex,
				Metadata: map[string]string{
					"file_path": filePath,
					"file_name": filepath.Base(filePath),
				},
				CreatedAt: time.Now(),
			}
			chunks = append(chunks, chunk)
			chunkIndex++
		}
	}

	return chunks
}

// splitLongParagraph 分割长段落
func (skv *SimpleKnowledgeVectorizer) splitLongParagraph(paragraph, filePath string, chunkIndex *int) []*types.TextChunk {
	var chunks []*types.TextChunk

	// 按句子分割
	sentences := strings.FieldsFunc(paragraph, func(r rune) bool {
		return r == '。' || r == '！' || r == '？' || r == '；'
	})

	currentChunk := ""
	for _, sentence := range sentences {
		sentence = strings.TrimSpace(sentence)
		if len(sentence) == 0 {
			continue
		}

		// 如果加上这个句子会超过限制，先保存当前分片
		if len(currentChunk)+len(sentence) > 600 && len(currentChunk) > 0 {
			chunk := &types.TextChunk{
				ID:         fmt.Sprintf("%s_chunk_%d", generateDocID(filePath), *chunkIndex),
				Content:    currentChunk,
				ChunkIndex: *chunkIndex,
				Metadata: map[string]string{
					"file_path": filePath,
					"file_name": filepath.Base(filePath),
				},
				CreatedAt: time.Now(),
			}
			chunks = append(chunks, chunk)
			*chunkIndex++
			currentChunk = sentence
		} else {
			if len(currentChunk) > 0 {
				currentChunk += "。" + sentence
			} else {
				currentChunk = sentence
			}
		}
	}

	// 保存最后一个分片
	if len(currentChunk) > 0 {
		chunk := &types.TextChunk{
			ID:         fmt.Sprintf("%s_chunk_%d", generateDocID(filePath), *chunkIndex),
			Content:    currentChunk,
			ChunkIndex: *chunkIndex,
			Metadata: map[string]string{
				"file_path": filePath,
				"file_name": filepath.Base(filePath),
			},
			CreatedAt: time.Now(),
		}
		chunks = append(chunks, chunk)
		*chunkIndex++
	}

	return chunks
}

// storeVectors 存储向量数据
func (skv *SimpleKnowledgeVectorizer) storeVectors(result interface{}, filePath string) (int, error) {
	// 由于BatchVectorizeResult结构不确定，我们使用类型断言
	// 这里简化处理，假设result包含Results字段

	// 创建一些示例向量数据用于测试
	testVectors := []*storage.VectorData{
		{
			ID:     fmt.Sprintf("%s_test_vector", generateDocID(filePath)),
			Vector: make([]float32, 768), // 创建768维零向量
			Payload: map[string]interface{}{
				"file_path":    filePath,
				"file_name":    filepath.Base(filePath),
				"processed_at": time.Now().Format(time.RFC3339),
				"content":      "测试内容",
			},
		},
	}

	// 填充一些随机值
	for i := range testVectors[0].Vector {
		testVectors[0].Vector[i] = 0.1
	}

	// 批量插入
	client := skv.storageManager.GetClient()
	batchResult, err := client.BatchInsert(skv.ctx, "knowledge_base", testVectors, 50)
	if err != nil {
		return 0, fmt.Errorf("批量插入向量失败: %w", err)
	}

	log.Printf("成功存储 %d 个向量，失败 %d 个", batchResult.Successful, batchResult.Failed)
	return batchResult.Successful, nil
}

// generateDocID 生成文档ID
func generateDocID(filePath string) string {
	// 使用文件路径的相对路径作为ID
	relPath := strings.ReplaceAll(filePath, "\\", "/")
	relPath = strings.ReplaceAll(relPath, "docs/样本知识 国资委监管制度/", "")
	return strings.ReplaceAll(relPath, "/", "_")
}

// Close 关闭资源
func (skv *SimpleKnowledgeVectorizer) Close() error {
	if skv.vectorService != nil {
		if err := skv.vectorService.Close(); err != nil {
			log.Printf("关闭向量服务失败: %v", err)
		}
	}

	if skv.storageManager != nil {
		if err := skv.storageManager.Close(); err != nil {
			log.Printf("关闭存储管理器失败: %v", err)
		}
	}

	return nil
}

// GetStats 获取处理统计信息
func (skv *SimpleKnowledgeVectorizer) GetStats() (*ProcessingStats, error) {
	client := skv.storageManager.GetClient()

	// 获取存储统计
	storageStats, err := client.GetStorageStats(skv.ctx)
	if err != nil {
		return nil, fmt.Errorf("获取存储统计失败: %w", err)
	}

	return &ProcessingStats{
		TotalVectors:     storageStats.TotalVectors,
		TotalCollections: storageStats.TotalCollections,
		StorageStats:     storageStats,
	}, nil
}

// ProcessingStats 处理统计信息
type ProcessingStats struct {
	TotalVectors     int64                 `json:"total_vectors"`
	TotalCollections int64                 `json:"total_collections"`
	StorageStats     *storage.StorageStats `json:"storage_stats"`
}

// run executes the full vectorization pipeline and returns an error instead
// of exiting, so deferred cleanup actually runs. (In the original code,
// log.Fatalf calls os.Exit, which skips defers — vectorizer.Close() was never
// invoked on any failure after construction.)
func run(knowledgeBasePath string) error {
	vectorizer, err := NewSimpleKnowledgeVectorizer()
	if err != nil {
		return fmt.Errorf("创建向量化工具失败: %w", err)
	}
	defer vectorizer.Close()

	startTime := time.Now()
	if err := vectorizer.ProcessKnowledgeBase(knowledgeBasePath); err != nil {
		return fmt.Errorf("处理知识库失败: %w", err)
	}
	processingTime := time.Since(startTime)

	// Statistics are best-effort: a failure here is logged, not fatal.
	stats, err := vectorizer.GetStats()
	if err != nil {
		log.Printf("获取统计信息失败: %v", err)
	} else {
		fmt.Printf("\n=== 处理完成统计 ===\n")
		fmt.Printf("处理时间: %v\n", processingTime)
		fmt.Printf("总向量数: %d\n", stats.TotalVectors)
		fmt.Printf("总集合数: %d\n", stats.TotalCollections)
	}

	fmt.Println("\n知识库向量化完成！")
	fmt.Println("可以使用验证工具检查向量化结果")
	return nil
}

// main validates the command-line arguments and delegates to run, exiting
// non-zero on failure only after all deferred cleanup has completed.
func main() {
	if len(os.Args) < 2 {
		fmt.Println("使用方法: go run main.go <知识库路径>")
		fmt.Println("示例: go run main.go ../../docs/样本知识\\ 国资委监管制度")
		os.Exit(1)
	}

	knowledgeBasePath := os.Args[1]

	// Fail fast if the path does not exist; other Stat errors surface later
	// when the walk begins.
	if _, err := os.Stat(knowledgeBasePath); os.IsNotExist(err) {
		log.Fatalf("知识库路径不存在: %s", knowledgeBasePath)
	}

	if err := run(knowledgeBasePath); err != nil {
		log.Fatal(err)
	}
}
