package demo

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
)

// SensitiveDataSummary maps one aggregated row of the sensitive_data_summary
// table: the per-(file_id, data_label) rollup of detection results.
type SensitiveDataSummary struct {
	FileID         string    `json:"file_id"`              // file identifier (GROUP BY key)
	DataLabel      string    `json:"data_label"`           // sensitive-data label (GROUP BY key)
	MaxDataLevel   uint8     `json:"final_max_data_level"` // max(max_data_level) across the window
	DetectionCount uint64    `json:"final_detection_count"` // sum(detection_count) across the window
	LastDetectedAt time.Time `json:"final_last_detected_at"` // max(last_detected_at) across the window
}

// SensitiveDataSummaryOverview maps one aggregated row of the
// sensitive_data_summary_overview table: the per-file_id rollup across all labels.
type SensitiveDataSummaryOverview struct {
	FileID                   string    `json:"file_id"`                      // file identifier (GROUP BY key)
	TotalDetectionCount      uint64    `json:"total_detection_count"`        // sum(total_detection_count)
	MaxDataLevelAcrossLabels uint8     `json:"max_data_level_across_labels"` // max over all labels
	DistinctLabelsCount      uint64    `json:"distinct_labels_count"`        // uniqMerge of the labels state
	LastDetectedAt           time.Time `json:"last_detected_at"`             // max(last_detected_at)
}

// RunAggregatingDemo queries the sensitive_data_summary table for the last
// day, aggregating per (file_id, data_label), and prints every row ordered
// by detection count. As demo code it terminates the process via log.Fatal
// on any error.
func RunAggregatingDemo(conn driver.Conn) {
	ctx := context.Background()

	// Verify the connection is alive before issuing the query.
	if err := conn.Ping(ctx); err != nil {
		log.Fatal(err)
	}

	// Query the sensitive_data_summary table. FINAL forces ClickHouse to
	// fully merge parts before the GROUP BY rollup is applied.
	query := `
SELECT
    file_id,
    data_label,
    max(max_data_level) AS final_max_data_level,
    sum(detection_count) AS final_detection_count,
    max(last_detected_at) AS final_last_detected_at
FROM sensitive_data_summary
FINAL
WHERE time_bucket>= now() - INTERVAL 1 DAY
GROUP BY file_id, data_label
ORDER BY final_detection_count DESC;
    `

	rows, err := conn.Query(ctx, query)
	if err != nil {
		log.Fatal(err)
	}
	// BUGFIX: the cursor was never closed; release it even on early exit.
	defer rows.Close()

	var records []SensitiveDataSummary

	// Collect all result rows; Scan order must match the SELECT column order.
	for rows.Next() {
		var record SensitiveDataSummary
		if err := rows.Scan(
			&record.FileID,
			&record.DataLabel,
			&record.MaxDataLevel,
			&record.DetectionCount,
			&record.LastDetectedAt,
		); err != nil {
			log.Fatal("Row scan failed: ", err)
		}
		records = append(records, record)
	}

	// BUGFIX: check the iteration error BEFORE printing, so partial results
	// from a failed iteration are never presented as complete output.
	if err = rows.Err(); err != nil {
		log.Fatal("Row iteration error: ", err)
	}

	// Print the collected results.
	fmt.Printf("Found %d records:\n", len(records))
	for i, r := range records {
		fmt.Printf("[%d] FileID=%s, Label=%s, MaxLevel=%d, Count=%d, LastDetected=%s\n",
			i+1,
			r.FileID,
			r.DataLabel,
			r.MaxDataLevel,
			r.DetectionCount,
			r.LastDetectedAt.Format("2006-01-02 15:04:05.000000"),
		)
	}
}

// RunAggregatingOverviewDemo queries the sensitive_data_summary_overview
// table for the last day, aggregating per file_id (merging the distinct-label
// state with uniqMerge), and prints up to 100 rows. As demo code it
// terminates the process via log.Fatal on any error.
func RunAggregatingOverviewDemo(conn driver.Conn) {
	ctx := context.Background()

	// Verify the connection is alive before issuing the query.
	if err := conn.Ping(ctx); err != nil {
		log.Fatal(err)
	}

	// Query the sensitive_data_summary_overview table. uniqMerge finalizes
	// the stored AggregateFunction state into a distinct count.
	query := `
SELECT 
    file_id,
    sum(total_detection_count) AS total_detection_count,
    max(max_data_level_across_labels) AS max_data_level_across_labels,
    uniqMerge(distinct_labels_state) AS distinct_labels_count,
    max(last_detected_at) AS last_detected_at
FROM sensitive_data_summary_overview
FINAL
WHERE time_bucket >= now() - INTERVAL 1 DAY
GROUP BY file_id
LIMIT 100;
    `

	rows, err := conn.Query(ctx, query)
	if err != nil {
		log.Fatal(err)
	}
	// BUGFIX: the cursor was never closed; release it even on early exit.
	defer rows.Close()

	var records []SensitiveDataSummaryOverview

	// Collect all result rows; Scan order must match the SELECT column order.
	for rows.Next() {
		var record SensitiveDataSummaryOverview
		if err := rows.Scan(
			&record.FileID,
			&record.TotalDetectionCount,
			&record.MaxDataLevelAcrossLabels,
			&record.DistinctLabelsCount,
			&record.LastDetectedAt,
		); err != nil {
			log.Fatal("Row scan failed: ", err)
		}
		records = append(records, record)
	}

	// BUGFIX: check the iteration error BEFORE printing, so partial results
	// from a failed iteration are never presented as complete output.
	if err = rows.Err(); err != nil {
		log.Fatal("Row iteration error: ", err)
	}

	// Print the collected results.
	fmt.Printf("Found %d overview records:\n", len(records))
	for i, r := range records {
		fmt.Printf("[%d] FileID=%s, TotalCount=%d, MaxLevel=%d, DistinctLabels=%d, LastDetected=%s\n",
			i+1,
			r.FileID,
			r.TotalDetectionCount,
			r.MaxDataLevelAcrossLabels,
			r.DistinctLabelsCount,
			r.LastDetectedAt.Format("2006-01-02 15:04:05.000000"),
		)
	}
}
