package mymongo

import (
    "context"
    "fmt"
    //"log"
    //"encoding/json"
    "sync"
    "time"
    "strings"
    "strconv"
    "mygo/mystruct"
    "go.mongodb.org/mongo-driver/bson"
    //"go.mongodb.org/mongo-driver/bson/primitive"
    "go.mongodb.org/mongo-driver/mongo/options"
)


// Export queries the collection bound to client using the criteria in
// mapList and returns the matched documents as rows of strings.
//
// Recognized mapList keys:
//   - "start_time"/"stop_time": range filter on "appendtime", parsed with
//     layout "2006-01-02-15-04-05"; both must be present to take effect.
//     Parsed times are shifted back 8 hours (local UTC+8 input, stored UTC —
//     NOTE(review): confirm timezone intent with callers).
//   - "expcond": ":"-separated conditions, each a "field,op,value" triple;
//     malformed or unknown-field conditions are silently skipped.
//   - "expone": ":"-separated field names restricting the exported columns
//     (applied as a Mongo projection); empty means all model columns.
//   - "exptype": passed through unchanged as the second return value.
//
// The first returned row is the "[Name/Value]" header. An empty second
// return value signals an error (time parse or query failure).
func Export(client *Curs, mapList map[string]string) ([][]string, string) {
	search := bson.D{}
	var values [][]string

	// Time-range filter on "appendtime".
	if startTime, endTime := mapList["start_time"], mapList["stop_time"]; startTime != "" && endTime != "" {
		start, err := time.Parse("2006-01-02-15-04-05", startTime)
		if err != nil {
			return values, ""
		}
		end, err := time.Parse("2006-01-02-15-04-05", endTime)
		if err != nil {
			return values, ""
		}
		search = append(search, bson.E{Key: "appendtime", Value: bson.D{
			{Key: "$gt", Value: start.Add(-8 * time.Hour)},
			{Key: "$lte", Value: end.Add(-8 * time.Hour)},
		}})
	}

	// Per-field filter conditions.
	if expCond := mapList["expcond"]; expCond != "" {
		for _, cond := range strings.Split(expCond, ":") {
			parts := strings.Split(cond, ",")
			if len(parts) < 3 {
				continue
			}
			item := mystruct.FindItemsByName(client.Body.Data, parts[0])
			if item.Select == "" {
				continue // unknown field: skip rather than fail the export
			}
			mkey := fmt.Sprintf("data.%s", parts[0])
			switch item.Select {
			case "string", "file", "pack":
				handleStringOrFileCondition(&search, mkey, parts[1], parts[2])
			case "int", "prgs":
				handleNumericCondition(&search, mkey, parts[1], parts[2])
			case "bool":
				handleBoolCondition(&search, mkey, parts[1])
			case "tag", "but":
				handleTagOrButCondition(&search, mkey, parts[1], parts[2])
			case "word":
				// "word" fields are filtered on their embedded name.
				handleStringOrFileCondition(&search, fmt.Sprintf("%s.name", mkey), parts[1], parts[2])
			}
		}
	}

	var lenTable int
	var modelList []mystruct.ModelItems

	// Renamed from "options" so the local does not shadow the imported
	// mongo-driver options package.
	findOpts := options.Find()
	if expOne := mapList["expone"]; expOne != "" {
		// Export only the requested columns, via a projection.
		expones := strings.Split(expOne, ":")
		lenTable = len(expones)
		modelList = make([]mystruct.ModelItems, 0, lenTable)
		allTable := make([]string, 0, lenTable)
		fields := make(bson.M, lenTable)
		for _, field := range expones {
			for _, key := range client.Body.Data {
				if key.Value == field {
					fields[fmt.Sprintf("data.%s", field)] = 1
					modelList = append(modelList, key)
					allTable = append(allTable, fmt.Sprintf("[%s/%s]", key.Name, key.Value))
					break
				}
			}
		}
		findOpts.SetProjection(fields)
		values = append(values, allTable)
	} else {
		// Export every column defined in the model.
		lenTable = len(client.Body.Data)
		modelList = make([]mystruct.ModelItems, 0, lenTable)
		allTable := make([]string, 0, lenTable)
		for _, key := range client.Body.Data {
			modelList = append(modelList, key)
			allTable = append(allTable, fmt.Sprintf("[%s/%s]", key.Name, key.Value))
		}
		values = append(values, allTable)
	}

	// Execute the query.
	ctx := context.Background()
	resCur, err := client.Cur.Find(ctx, search, findOpts)
	if err != nil {
		return values, ""
	}
	defer resCur.Close(ctx)

	pool := newStringRowPool(lenTable)
	// Convert documents in fixed-size batches.
	const batchSize = 1000
	results := make([]bson.Raw, 0, batchSize)

	for resCur.Next(ctx) {
		// Cursor.Current is only valid until the next call to Next, so the
		// raw document must be copied before it is retained in the batch.
		doc := make(bson.Raw, len(resCur.Current))
		copy(doc, resCur.Current)
		results = append(results, doc)

		if len(results) >= batchSize {
			exportProcessBatch(results, pool, modelList, &values, client.Body.Apiid)
			results = results[:0] // keep capacity, drop contents
		}
	}
	// Surface iteration errors the same way as a failed Find.
	if err := resCur.Err(); err != nil {
		return values, ""
	}

	// Flush the final partial batch.
	if len(results) > 0 {
		exportProcessBatch(results, pool, modelList, &values, client.Body.Apiid)
	}

	return values, mapList["exptype"]
}


// exportProcessBatch converts a batch of raw BSON documents into string rows
// and appends them to *values in the same order as docs.
//
// Documents are decoded concurrently (one goroutine per document); each
// goroutine borrows a scratch row from pool and returns it after copying the
// formatted values out. rows is indexed by position so output order is stable.
func exportProcessBatch(docs []bson.Raw, pool *StringRowPool, keys []mystruct.ModelItems, values *[][]string, Apiid string) {
	rows := make([][]string, len(docs))

	var wg sync.WaitGroup
	wg.Add(len(docs))

	for i := range docs {
		go func(idx int, doc bson.Raw) {
			defer wg.Done()

			row := pool.Get()

			var docStruct MongoDocument
			if err := bson.Unmarshal(doc, &docStruct); err != nil {
				// Decode failure: emit an empty row rather than leaking
				// stale contents left in the pooled row by a previous use.
				for j := range row {
					row[j] = ""
				}
			} else {
				// Hex id is loop-invariant; compute it once per document.
				id := docStruct.ID.Hex()
				for j, key := range keys {
					row[j] = formatExportValue(key, docStruct.Data[key.Value], id, Apiid)
				}
			}

			// Copy out before returning the scratch row to the pool.
			out := make([]string, len(row))
			copy(out, row)
			rows[idx] = out

			pool.Put(row)
		}(i, docs[i])
	}

	wg.Wait()
	// Append in input order.
	*values = append(*values, rows...)
}

// formatExportValue renders one field value according to its model type.
// All type assertions use the comma-ok form so a missing (nil) or
// unexpectedly-typed BSON value yields a zero value instead of panicking
// inside a worker goroutine and killing the whole export.
func formatExportValue(key mystruct.ModelItems, val interface{}, id, apiid string) string {
	switch key.Select {
	case "file":
		s, _ := val.(string)
		return mystruct.JoinStrings("/api/download?code=", apiid, "&_id=", id, "&", key.Value, "=", s)
	case "pack":
		return mystruct.JoinStrings("/api/pack?code=", apiid, "&_id=", id, "&name=", key.Value)
	case "word":
		return mystruct.JoinStrings("/api/word?code=", apiid, "&_id=", id, "&name=", key.Value)
	case "bool":
		if b, _ := val.(bool); b {
			return "true"
		}
		return "false"
	case "int", "prgs":
		// BSON integers can decode as int32, int64, or float64 depending on
		// how they were stored; accept all three instead of asserting int64.
		switch n := val.(type) {
		case int64:
			return strconv.FormatInt(n, 10)
		case int32:
			return strconv.FormatInt(int64(n), 10)
		case float64:
			return strconv.FormatInt(int64(n), 10)
		}
		return ""
	default:
		// "string", "tag", "but", and any unrecognized type: plain string.
		s, _ := val.(string)
		return s
	}
}
