package logs

import (
	"bytes"
	"context"
	"encoding/json"
	"github.com/dustin/go-humanize"
	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esutil"
	log "github.com/sirupsen/logrus"
	"strings"
	"sync/atomic"
	"time"
)

// g_es_client is the package-level Elasticsearch client holder; it is
// allocated in init and its inner client is populated by initEsClient.
var g_es_client *EsConf

// EsConf wraps the Elasticsearch client used for bulk log indexing.
type EsConf struct {
	// Es is the underlying go-elasticsearch v8 client.
	Es *elasticsearch.Client
}

func init() {
	g_es_client = new(EsConf)
}

// initEsClient builds an Elasticsearch client for es_addr, authenticating
// as the "elastic" user with es_pwd, and installs the "geoip" ingest
// pipeline used to enrich documents with source/destination geo info.
//
// http_ca is currently unused — TLS with a custom CA is not configured
// here; TODO(review): wire it into elasticsearch.Config.CACert if needed.
//
// Pipeline installation is best-effort: failures are logged but do not
// prevent the client from being returned.
func (e *EsConf) initEsClient(es_addr, es_pwd, http_ca string) (*elasticsearch.Client, error) {
	cfg := elasticsearch.Config{
		Addresses: []string{es_addr},
		Username:  "elastic",
		Password:  es_pwd}
	es, err := elasticsearch.NewClient(cfg)
	if err != nil {
		return nil, err
	}

	res, err := es.Ingest.PutPipeline("geoip", strings.NewReader(`{
	  "description" : "Add geoip info",
	"processors": [
	{"geoip": {
				  "field": "source_addr","target_field": "from"
				}},{"geoip": {
										  "field": "host","target_field": "to"
										}}
	]
	}`))
	if err != nil {
		log.Errorf("put pipeline error: %v", err)
	} else {
		// The transport call can succeed while ES rejects the request
		// (4xx/5xx); surface that, and always close the response body so
		// the underlying connection can be reused.
		if res.IsError() {
			log.Errorf("put pipeline error: %s", res.String())
		}
		res.Body.Close()
	}

	return es, nil
}

// SendDataToEs bulk-indexes data into the given Elasticsearch index via
// the package-level client. It delegates to EsConf.sendDataToEs.
func SendDataToEs(index string, data []interface{}) error {
	client := g_es_client
	return client.sendDataToEs(index, data)
}

// sendDataToEs bulk-indexes raw_data into index using an esutil.BulkIndexer.
//
// Each element is JSON-encoded and submitted as an "index" action. Items
// that fail to encode are skipped with a log message — adding them anyway
// (as the previous version did) would put an empty body into the bulk
// request and corrupt the whole batch. Per-item indexing failures are
// reported through the OnFailure hook. The call blocks until the indexer
// is flushed and closed, then logs a throughput summary. It returns an
// error only when the indexer itself cannot be created.
func (e *EsConf) sendDataToEs(index string, raw_data []interface{}) error {
	var countSuccessful uint64
	var pipeline string

	// Pipeline enrichment is currently disabled; re-enable per-index here.
	//if index == "query-log" {
	//	pipeline = "geoip"
	//}

	bi, err := esutil.NewBulkIndexer(esutil.BulkIndexerConfig{
		Index:         index,            // The default index name
		Client:        e.Es,             // The Elasticsearch client
		NumWorkers:    8,                // The number of worker goroutines
		FlushBytes:    1024 * 1024,      // The flush threshold in bytes, default 1MB
		FlushInterval: 10 * time.Second, // The periodic flush interval
		Pipeline:      pipeline,         // The ingest pipeline to use (currently empty)
	})
	if err != nil {
		return err
	}

	start := time.Now().UTC()

	for _, a := range raw_data {
		// Encode the item to JSON for the bulk request body.
		data, err := json.Marshal(a)
		if err != nil {
			log.Errorf("Cannot encode qps data %+v: %s", a, err)
			continue
		}

		err = bi.Add(
			context.Background(),
			esutil.BulkIndexerItem{
				// "index" creates the document or replaces an existing one.
				Action: "index",

				// Body is an `io.Reader` with the payload.
				Body: bytes.NewReader(data),

				// OnSuccess is called for each successful operation.
				OnSuccess: func(ctx context.Context, item esutil.BulkIndexerItem, res esutil.BulkIndexerResponseItem) {
					atomic.AddUint64(&countSuccessful, 1)
				},

				// OnFailure is called for each failed operation; item-level
				// rejections (err == nil) are counted in bi.Stats().NumFailed.
				OnFailure: func(ctx context.Context, item esutil.BulkIndexerItem, res esutil.BulkIndexerResponseItem, err error) {
					if err != nil {
						log.Errorf("ERROR: %s", err)
					}
				},
			},
		)
		if err != nil {
			log.Errorf("Unexpected error: %s", err)
		}
	}

	// Close flushes any buffered items and waits for all workers to finish.
	if err := bi.Close(context.Background()); err != nil {
		log.Errorf("Unexpected error: %s", err)
	}

	biStats := bi.Stats()

	// Report the results: number of indexed docs, number of errors,
	// duration, indexing rate.
	log.Println(strings.Repeat("▔", 65))

	dur := time.Since(start)

	// Compute docs/sec without dividing by zero: the previous version did
	// 1000/float64(dur/time.Millisecond), which is a division by zero (and
	// an undefined int64(+Inf) conversion) for sub-millisecond batches.
	rate := float64(biStats.NumFlushed)
	if ms := int64(dur / time.Millisecond); ms > 0 {
		rate = 1000.0 / float64(ms) * float64(biStats.NumFlushed)
	}

	if biStats.NumFailed > 0 {
		log.Errorf(
			"Indexed %s: [%s] documents with [%s] errors in %s (%s docs/sec)",
			index,
			humanize.Comma(int64(biStats.NumFlushed)),
			humanize.Comma(int64(biStats.NumFailed)),
			dur.Truncate(time.Millisecond),
			humanize.Comma(int64(rate)),
		)
	} else {
		log.Tracef(
			"Successfully indexed %s: [%s] documents in %s (%s docs/sec)",
			index,
			humanize.Comma(int64(biStats.NumFlushed)),
			dur.Truncate(time.Millisecond),
			humanize.Comma(int64(rate)),
		)
	}

	return nil
}
