// Tencent is pleased to support the open source community by making
// 蓝鲸智云 - 监控平台/日志平台 (BlueKing - Monitor/Log) available.
// Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved.
// Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://opensource.org/licenses/MIT
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//

package converter

import (
	"strconv"

	"github.com/elastic/beats/libbeat/common"
	"go.opentelemetry.io/collector/model/pdata"

	"github.com/TencentBlueKing/bk-collector/define"
)

// metricsEvent is the metrics flavour of define.CommonEvent; it embeds the
// common event and only overrides the record type reported by RecordType.
type metricsEvent struct {
	define.CommonEvent
}

func (e metricsEvent) RecordType() define.RecordType { return define.RecordMetrics }

// MetricsConverter is the shared, stateless EventConverter instance for OTLP metrics.
var MetricsConverter EventConverter = metricsConverter{}

// metricsConverter converts OTLP pdata.Metrics records into bk-collector metric events.
type metricsConverter struct{}

// ToEvent wraps the given payload into a metrics-typed event bound to dataId.
func (c metricsConverter) ToEvent(dataId int32, data common.MapStr) define.Event {
	evt := define.NewCommonEvent(dataId, data)
	return metricsEvent{CommonEvent: evt}
}

// ToDataID returns the metrics data id carried by the record's token.
func (c metricsConverter) ToDataID(record *define.Record) int32 {
	return record.Token.MetricsDataId
}

// Convert transforms an OTLP metrics record into bk-collector events, one
// event per extracted metric item. It returns nil when the record payload is
// not pdata.Metrics or when it carries no resource metrics.
func (c metricsConverter) Convert(record *define.Record) define.Events {
	pdMetrics, ok := record.Data.(pdata.Metrics)
	if !ok {
		return nil
	}
	rms := pdMetrics.ResourceMetrics()
	if rms.Len() == 0 {
		return nil
	}

	dataID := c.ToDataID(record)
	var events define.Events
	for i := 0; i < rms.Len(); i++ {
		rm := rms.At(i)
		// Resource attributes apply to every metric below this resource.
		attrs := rm.Resource().Attributes()
		ilms := rm.InstrumentationLibraryMetrics()
		for j := 0; j < ilms.Len(); j++ {
			mets := ilms.At(j).Metrics()
			for k := 0; k < mets.Len(); k++ {
				for _, item := range c.Extract(mets.At(k), attrs) {
					events = append(events, c.ToEvent(dataID, item))
				}
			}
		}
	}
	return events
}

// newItem assembles one metric item in the bk-collector event layout:
// the metrics payload, its merged dimensions and a unix-second timestamp,
// with the target fixed to "otlp".
func (c metricsConverter) newItem(metrics map[string]float64, dimensions map[string]interface{}, ts int64) common.MapStr {
	item := common.MapStr{}
	item["target"] = "otlp"
	item["timestamp"] = ts
	item["metrics"] = metrics
	item["dimension"] = dimensions
	return item
}

// Extract flattens one OTLP metric into flat metric items, merging each
// data point's attributes over the resource attributes as dimensions.
//
// Mapping:
//   - Sum/Gauge:  <name>            = point value (double- or int-encoded)
//   - Histogram:  <name>_sum/_count plus <name>_bucket with an "le" dimension
//   - Summary:    <name>_sum/_count plus <name> with a "quantile" dimension
//
// Metrics of any other data type yield no items.
func (c metricsConverter) Extract(pdMetric pdata.Metric, resources pdata.AttributeMap) []common.MapStr {
	var items []common.MapStr
	switch pdMetric.DataType() {
	case pdata.MetricDataTypeSum:
		items = c.extractNumberDataPoints(items, pdMetric.Name(), pdMetric.Sum().DataPoints(), resources)

	case pdata.MetricDataTypeGauge:
		items = c.extractNumberDataPoints(items, pdMetric.Name(), pdMetric.Gauge().DataPoints(), resources)

	case pdata.MetricDataTypeHistogram:
		dps := pdMetric.Histogram().DataPoints()
		for i := 0; i < dps.Len(); i++ {
			dp := dps.At(i)
			ts := dp.Timestamp().AsTime().Unix()
			dimensions := MergeReplaceAttributeMaps(dp.Attributes(), resources)
			items = append(items, c.newItem(
				map[string]float64{pdMetric.Name() + "_sum": dp.Sum()},
				dimensions,
				ts,
			))
			items = append(items, c.newItem(
				map[string]float64{pdMetric.Name() + "_count": float64(dp.Count())},
				dimensions,
				ts,
			))

			bounds := dp.ExplicitBounds()
			bucketCounts := dp.BucketCounts()
			// Skip only this malformed data point; the previous early return
			// here aborted the loop and dropped every remaining data point.
			// NOTE(review): OTLP histograms normally carry
			// len(bucketCounts) == len(bounds)+1 (implicit +Inf bucket);
			// this keeps the original equal-length contract — confirm what
			// the upstream producer actually sends.
			if len(bounds) != len(bucketCounts) {
				continue
			}
			for j := range bounds {
				// "le" mirrors the Prometheus bucket upper-bound label.
				additional := map[string]interface{}{"le": strconv.FormatFloat(bounds[j], 'f', -1, 64)}
				items = append(items, c.newItem(
					map[string]float64{pdMetric.Name() + "_bucket": float64(bucketCounts[j])},
					MergeReplaceMaps(additional, dimensions),
					ts,
				))
			}
		}

	case pdata.MetricDataTypeSummary:
		dps := pdMetric.Summary().DataPoints()
		for i := 0; i < dps.Len(); i++ {
			dp := dps.At(i)
			ts := dp.Timestamp().AsTime().Unix()
			dimensions := MergeReplaceAttributeMaps(dp.Attributes(), resources)
			items = append(items, c.newItem(
				map[string]float64{pdMetric.Name() + "_sum": dp.Sum()},
				dimensions,
				ts,
			))
			items = append(items, c.newItem(
				map[string]float64{pdMetric.Name() + "_count": float64(dp.Count())},
				dimensions,
				ts,
			))

			quantiles := dp.QuantileValues()
			for j := 0; j < quantiles.Len(); j++ {
				qv := quantiles.At(j)
				additional := map[string]interface{}{"quantile": strconv.FormatFloat(qv.Quantile(), 'f', -1, 64)}
				items = append(items, c.newItem(
					map[string]float64{pdMetric.Name(): qv.Value()},
					MergeReplaceMaps(additional, dimensions),
					ts,
				))
			}
		}
	}

	return items
}

// extractNumberDataPoints appends one item per Sum/Gauge data point under the
// given metric name. OTLP number data points carry either a double or an int
// value; reading DoubleVal() unconditionally (as the code previously did)
// silently reports 0 for int-encoded points, so the value type is checked.
func (c metricsConverter) extractNumberDataPoints(items []common.MapStr, name string, dps pdata.NumberDataPointSlice, resources pdata.AttributeMap) []common.MapStr {
	for i := 0; i < dps.Len(); i++ {
		dp := dps.At(i)
		val := dp.DoubleVal()
		if dp.Type() == pdata.MetricValueTypeInt {
			val = float64(dp.IntVal())
		}
		items = append(items, c.newItem(
			map[string]float64{name: val},
			MergeReplaceAttributeMaps(dp.Attributes(), resources),
			dp.Timestamp().AsTime().Unix(),
		))
	}
	return items
}
