package mymongo

import (
    //"fmt"
    "context"
    "time"
    "sync"
    "mygo/mystruct"
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo/options"
    "go.mongodb.org/mongo-driver/bson/primitive"
)

// FastWacthAll runs a paginated query against client.Cur. It first resolves the
// page's starting document with a skip-based FindOne, then fetches the rest of
// the page with an _id range filter so the main Find avoids a large server-side
// skip. It returns the status struct and the decoded rows (nil rows on error).
func FastWacthAll(data map[string]interface{}, limit *mystruct.Mypage, client *Curs) (*mystruct.Retdate, []primitive.M) {
	results := &mystruct.Retdate{"WacthData", mystruct.MyCodeOk, "succeed"}

	// Build the base filter; the pagination _id condition is appended later.
	search := bson.D{}
	statStr, ok1 := data["start_time"].(string)
	stopStr, ok2 := data["stop_time"].(string)

	// Optional time-range filter on "appendtime". The -8h shift converts the
	// incoming UTC+8 timestamps to the stored UTC values.
	if ok1 && ok2 && statStr != "" && stopStr != "" {
		stat, errStat := time.Parse("2006-01-02-15-04-05", statStr)
		stop, errStop := time.Parse("2006-01-02-15-04-05", stopStr)
		if errStat != nil || errStop != nil {
			// Parse errors used to be ignored, silently turning a malformed
			// timestamp into the zero time and a wrong filter; report instead.
			results.Mesg = "invalid time range, expect format 2006-01-02-15-04-05"
			results.Code = mystruct.MyCodeErr
			return results, nil
		}
		search = append(search, bson.E{Key: "appendtime", Value: bson.D{
			{"$gt", stat.Add(-8 * time.Hour)},
			{"$lte", stop.Add(-8 * time.Hour)},
		}})
	}
	delete(data, "start_time")
	delete(data, "stop_time")
	// Fold the remaining request fields into the filter.
	search_build(data, &search, client, "0")

	// Pre-query: locate the page's first document (all conditions, no _id yet).
	skip := limit.Page * limit.Limit
	opts := options.FindOne().SetSort(bson.D{{"_id", client.Body.Sort}}).SetSkip(int64(skip))
	var startDoc mystruct.SearchItem
	if err := client.Cur.FindOne(context.TODO(), search, opts).Decode(&startDoc); err != nil {
		results.Mesg = err.Error()
		results.Code = mystruct.MyCodeErr
		return results, nil
	}
	// Page forward (or backward) from the start document by _id.
	if client.Body.Sort == 1 {
		search = append(search, bson.E{Key: "_id", Value: bson.M{"$gt": startDoc.ID}})
	} else {
		search = append(search, bson.E{Key: "_id", Value: bson.M{"$lt": startDoc.ID}})
	}

	// Main query: the start document counts as the first row, hence Limit-1.
	findOptions := options.Find()
	findOptions.SetLimit(limit.Limit - 1)
	findOptions.SetSort(bson.D{{"_id", client.Body.Sort}})
	ctx := context.Background()
	res_cur, err := client.Cur.Find(ctx, search, findOptions)
	if err != nil {
		results.Mesg = err.Error()
		results.Code = mystruct.MyCodeErr
		return results, nil
	}
	defer res_cur.Close(ctx)

	var mapList []primitive.M
	startDocData := startDoc.Data
	if startDocData == nil {
		// Guard: a document with no payload decodes to a nil map, and writing
		// "_id" into a nil map would panic.
		startDocData = primitive.M{}
	}
	startDocData["_id"] = startDoc.ID.Hex()
	mapList = append(mapList, startDocData)

	pool := newPrimitiveRowPool()
	// Buffer raw documents and decode them in batches.
	batchSize := 1000
	result := make([]bson.Raw, 0, batchSize)

	for res_cur.Next(ctx) {
		// Cursor.Current is only valid until the next call to Next: copy it
		// before buffering, otherwise a later batch fetch clobbers the raws
		// buffered from earlier iterations.
		raw := make(bson.Raw, len(res_cur.Current))
		copy(raw, res_cur.Current)
		result = append(result, raw)

		if len(result) >= batchSize {
			fastProcessBatch(result, pool, &mapList)
			result = result[:0] // reuse the backing array
		}
	}
	// Surface cursor iteration errors instead of silently truncating the page.
	if err := res_cur.Err(); err != nil {
		results.Mesg = err.Error()
		results.Code = mystruct.MyCodeErr
		return results, nil
	}

	// Flush the final partial batch.
	if len(result) > 0 {
		fastProcessBatch(result, pool, &mapList)
	}

	return results, mapList
}

// fastProcessBatch decodes a batch of raw BSON documents into primitive.M rows
// in parallel and appends them, in input order, to *values. Each document's
// payload (Data) is flattened into the row and its ObjectID is exposed as a hex
// string under "_id". The pool parameter is kept for interface compatibility
// but is no longer needed (see comment below).
func fastProcessBatch(docs []bson.Raw, pool *PrimitiveRowPool, values *[]primitive.M) {
	output := make([]primitive.M, len(docs))

	var wg sync.WaitGroup
	wg.Add(len(docs))

	for i := range docs {
		go func(idx int, doc bson.Raw) {
			defer wg.Done()

			var docStruct MongoDocument
			if err := bson.Unmarshal(doc, &docStruct); err != nil {
				// Decode errors were previously ignored, leaking a zero-value
				// document into the result; emit an empty row instead.
				output[idx] = primitive.M{}
				return
			}

			// Build the row directly. The old code filled a pooled map and
			// then copied every key out again, which did double work and —
			// if the pool does not clear maps between uses — risked leaking
			// stale keys from a previously processed document.
			row := make(primitive.M, len(docStruct.Data)+1)
			for k, v := range docStruct.Data {
				row[k] = v
			}
			row["_id"] = docStruct.ID.Hex()
			output[idx] = row
		}(i, docs[i])
	}

	wg.Wait()

	// Each goroutine writes a distinct index of output, so no lock is needed;
	// a single append preserves document order.
	*values = append(*values, output...)
}