// Copyright (C) Kumo inc. and its affiliates.
// Author: Jeff.li lijippy@163.com
// All rights reserved.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//


#include <pollux/functions/prestosql/aggregates/classification_aggregation.h>
#include <pollux/common/base/io_utils.h>
#include <pollux/exec/aggregate.h>
#include <pollux/functions/prestosql/aggregates/aggregate_names.h>
#include <pollux/vector/flat_vector.h>

namespace kumo::pollux::aggregate::prestosql {
    namespace {
        /// Selects which classification metric a ClassificationAggregation
        /// instance computes; used as a template parameter so one
        /// implementation serves all five aggregates.
        enum class ClassificationType {
            kFallout = 0,    // false positives / total negatives at each threshold
            kPrecision = 1,  // true positives / (true positives + false positives)
            kRecall = 2,     // true positives / total positives
            kMissRate = 3,   // weight of positives predicted below the threshold / total positives
            kThresholds = 4, // the threshold (left bucket edge) itself, per bucket
        };

        /// One bin of a FixedDoubleHistogram: the value range it covers
        /// (left edge inclusive; right edge is the next bucket's left edge,
        /// clamped to the histogram max) and the weight accrued in it.
        struct Bucket {
            Bucket(double leftEdge, double rightEdge, double accruedWeight)
                : left(leftEdge), right(rightEdge), weight(accruedWeight) {}

            const double left;   // lower bound of the bin
            const double right;  // upper bound of the bin
            const double weight; // total weight accumulated in the bin
        };

        /// Fixed-bucket histogram of weights as doubles. For each bucket, it stores the
        /// total weight accumulated. Buckets evenly partition [min_, max_]; a value is
        /// mapped to its bucket proportionally to (value - min_) / (max_ - min_).
        /// The histogram starts uninitialized (bucketCount_ == -1) and is lazily
        /// initialized by tryInit() or by merging in a serialized histogram.
        class FixedDoubleHistogram {
        public:
            explicit FixedDoubleHistogram(HashStringAllocator *allocator)
                : weights_(StlAllocator<double>(allocator)) {
            }

            /// Validates the current (bucketCount_, min_, max_) parameters and sizes
            /// the weights vector to one slot per bucket. Requires bucketCount_ to be
            /// set first.
            void resizeWeights() {
                validateParameters(bucketCount_, min_, max_);
                weights_.resize(bucketCount_);
            }

            /// API to support the case when bucket is created without a bucketCount
            /// count. No-op once the histogram has been initialized
            /// (bucketCount_ != -1), so repeated calls are safe.
            void tryInit(int64_t bucketCount) {
                if (bucketCount_ == -1) {
                    bucketCount_ = bucketCount;
                    resizeWeights();
                }
            }

            /// Add weight to bucket based on the value of the prediction.
            /// Zero weights are ignored; negative weights and predictions outside
            /// [kMinPredictionValue, kMaxPredictionValue] raise a user error.
            void add(double pred, double weight) {
                if (weight == 0) {
                    return;
                }
                if (weight < 0) {
                    POLLUX_USER_FAIL("Weight must be non-negative.");
                }
                if (pred < kMinPredictionValue || pred > kMaxPredictionValue) {
                    POLLUX_USER_FAIL(
                        "Prediction value must be between {} and {}",
                        kMinPredictionValue,
                        kMaxPredictionValue);
                }
                auto index = getIndexForValue(bucketCount_, min_, max_, pred);
                weights_.at(index) += weight;
                totalWeights_ += weight;
                // Track the highest touched bucket so size() stays O(1).
                maxUsedIndex_ = std::max(maxUsedIndex_, index);
            }

            /// Returns a bucket in this histogram at a given index.
            Bucket getBucket(int64_t index) {
                return Bucket(
                    getLeftValueForIndex(bucketCount_, min_, max_, index),
                    getRightValueForIndex(bucketCount_, min_, max_, index),
                    weights_.at(index));
            }

            /// The size of the histogram is represented by maxUsedIndex_, which
            /// represents the largest index in the buckets with a non-zero accrued value.
            /// This helps us avoid O(n) operation for the size of the histogram.
            /// Returns 0 for an empty histogram (maxUsedIndex_ == -1).
            int64_t size() const {
                return maxUsedIndex_ + 1;
            }

            int64_t bucketCount() const {
                return bucketCount_;
            }

            /// The state of the histogram can be serialized into a buffer. The format is
            /// represented as [header][bucketCount][min][max][weights]. The header is
            /// used to identify the version of the serialization format. The bucketCount,
            /// min, and max are used to represent the parameters of the histogram.
            /// Weights are the number of weights (equal to number of buckets) in the
            /// histogram.
            /// Returns the number of bytes written; the caller must provide at least
            /// serializationSize() bytes of space.
            size_t serialize(char *output) const {
                POLLUX_CHECK(output);
                common::OutputByteStream stream(output);
                size_t bytesUsed = 0;
                stream.append(
                    reinterpret_cast<const char *>(&kSerializationVersionHeader),
                    sizeof(kSerializationVersionHeader));
                bytesUsed += sizeof(kSerializationVersionHeader);

                stream.append(
                    reinterpret_cast<const char *>(&bucketCount_), sizeof(bucketCount_));
                bytesUsed += sizeof(bucketCount_);

                stream.append(reinterpret_cast<const char *>(&min_), sizeof(min_));
                bytesUsed += sizeof(min_);

                stream.append(reinterpret_cast<const char *>(&max_), sizeof(max_));
                bytesUsed += sizeof(max_);

                for (auto weight: weights_) {
                    stream.append(reinterpret_cast<const char *>(&weight), sizeof(weight));
                    bytesUsed += sizeof(weight);
                }

                return bytesUsed;
            }

            /// Merges the current histogram with another histogram represented as a
            /// buffer (the serialize() format above). Weights are added bucket-wise;
            /// bucket count, min and max must match unless this histogram is still
            /// uninitialized.
            void mergeWith(const char *data, size_t expectedSize) {
                auto input = common::InputByteStream(data);
                deserialize(*this, input, expectedSize);
            }

            /// Number of bytes serialize() will write for the current state.
            size_t serializationSize() const {
                return sizeof(kSerializationVersionHeader) + sizeof(bucketCount_) +
                       sizeof(min_) + sizeof(max_) + (weights_.size() * sizeof(double));
            }

            /// This represents the total accrued weights in the bucket. The value is
            /// cached to avoid recomputing it every time it is needed.
            double totalWeights() const {
                return totalWeights_;
            }

        private:
            /// Deserializes the histogram from a buffer and adds its weights into
            /// `histogram`. Initializes `histogram` when it has no parameters yet;
            /// otherwise fails if the serialized parameters do not match.
            static void deserialize(
                FixedDoubleHistogram &histogram,
                common::InputByteStream &in,
                size_t expectedSize) {
                if (MELON_UNLIKELY(expectedSize < minDeserializedBufferSize())) {
                    POLLUX_USER_FAIL(
                        "Cannot deserialize FixedDoubleHistogram. Expected size: {}, actual size: {}",
                        minDeserializedBufferSize(),
                        expectedSize);
                }

                uint8_t version;
                in.copyTo(&version, 1);
                POLLUX_CHECK_EQ(version, kSerializationVersionHeader);

                int64_t bucketCount;
                double min;
                double max;
                in.copyTo(&bucketCount, 1);
                in.copyTo(&min, 1);
                in.copyTo(&max, 1);

                /// This accounts for the case when the histogram is not initialized yet.

                if (histogram.bucketCount_ == -1) {
                    histogram.bucketCount_ = bucketCount;
                    histogram.min_ = min;
                    histogram.max_ = max;
                    histogram.resizeWeights();
                } else {
                    /// When merging histograms, all the parameters except for the values
                    /// accrued inside the buckets must be the same.
                    if (histogram.bucketCount_ != bucketCount) {
                        POLLUX_USER_FAIL(
                            "Bucket count must be constant."
                            "Left bucket count: {}, right bucket count: {}",
                            histogram.bucketCount_,
                            bucketCount);
                    }

                    if (histogram.min_ != min || histogram.max_ != max) {
                        POLLUX_USER_FAIL(
                            "Cannot merge histograms with different min/max values. "
                            "Left min: {}, left max: {}, right min: {}, right max: {}",
                            histogram.min_,
                            histogram.max_,
                            min,
                            max);
                    }
                }

                // Add the serialized weights bucket-wise, keeping the cached total
                // and max-used-index in sync.
                for (int64_t i = 0; i < bucketCount; ++i) {
                    double weight;
                    in.copyTo(&weight, 1);
                    histogram.weights_[i] += weight;
                    histogram.totalWeights_ += weight;
                    if (weight != 0) {
                        histogram.maxUsedIndex_ = std::max(histogram.maxUsedIndex_, i);
                    }
                }
                // Sanity-check that the buffer held exactly one serialized histogram.
                const size_t bytesRead = sizeof(kSerializationVersionHeader) +
                                         sizeof(bucketCount) + sizeof(min) + sizeof(max) +
                                         (bucketCount * sizeof(double));
                POLLUX_CHECK_EQ(bytesRead, expectedSize);
                return;
            }

            /// The minimum size of a valid buffer to deserialize a histogram.
            static constexpr size_t minDeserializedBufferSize() {
                return (
                    sizeof(kSerializationVersionHeader) + sizeof(int64_t) + sizeof(double) +
                    /// 2 represents the minimum number of buckets.
                    sizeof(double) + 2 * sizeof(double));
            }

            /// Returns the index of the bucket in the histogram that contains the
            /// value. This is done by mapping value to [min, max) and then mapping that
            /// value to the corresponding bucket. Values equal to max are clamped into
            /// the last bucket.
            static int64_t
            getIndexForValue(int64_t bucketCount, double min, double max, double value) {
                POLLUX_CHECK(
                    value >= min && value <= max,
                    fmt::format(
                        "Value must be within range: {} [{}, {}]", value, min, max));
                return std::min(
                    static_cast<int64_t>((bucketCount * (value - min)) / (max - min)),
                    bucketCount - 1);
            }

            /// Lower edge of the bucket at `index`; buckets evenly split [min, max].
            static double getLeftValueForIndex(
                int64_t bucketCount,
                double min,
                double max,
                int64_t index) {
                return min + index * (max - min) / bucketCount;
            }

            /// Upper edge of the bucket at `index`, clamped to max for the last bucket.
            static double getRightValueForIndex(
                int64_t bucketCount,
                double min,
                double max,
                int64_t index) {
                return std::min(
                    max, getLeftValueForIndex(bucketCount, min, max, index + 1));
            }

            /// Fails with a user error unless bucketCount >= 2, bucketCount fits in
            /// the weights vector and min < max.
            void validateParameters(int64_t bucketCount, double min, double max) {
                POLLUX_CHECK_LE(bucketCount, weights_.max_size());

                if (bucketCount < 2) {
                    POLLUX_USER_FAIL("Bucket count must be at least 2.0");
                }

                if (min >= max) {
                    POLLUX_USER_FAIL("Min must be less than max. Min: {}, max: {}", min, max);
                }
            }

            static constexpr double kMinPredictionValue = 0.0;
            static constexpr double kMaxPredictionValue = 1.0;
            static constexpr uint8_t kSerializationVersionHeader = 1;
            // Per-bucket accrued weights, backed by the aggregate's allocator.
            std::vector<double, StlAllocator<double> > weights_;
            // Cached sum of all weights ever added or merged in.
            double totalWeights_{0};
            // -1 until initialized via tryInit() or deserialize().
            int64_t bucketCount_{-1};
            double min_{0};
            double max_{1.0};
            // Largest bucket index with a non-zero accrued weight; -1 when empty.
            int64_t maxUsedIndex_{-1};
        };

        /// Per-group state for a classification aggregate: a pair of histograms
        /// holding the weighted prediction distribution of positive outcomes
        /// (trueWeights_) and negative outcomes (falseWeights_).
        template<ClassificationType type>
        struct Accumulator {
            explicit Accumulator(HashStringAllocator *allocator)
                : trueWeights_(allocator), falseWeights_(allocator) {
            }

            /// Adds (pred, weight) to the true or false histogram depending on
            /// `outcome`. Both histograms must already be initialized with
            /// `bucketCount` buckets (see tryInit).
            void
            setWeights(int64_t bucketCount, bool outcome, double pred, double weight) {
                POLLUX_CHECK_EQ(bucketCount, trueWeights_.bucketCount());
                POLLUX_CHECK_EQ(bucketCount, falseWeights_.bucketCount());

                /// Similar to Java Presto, the max prediction value for the histogram
                /// is set to be 0.99999999999 in order to ensure bin corresponding to 1
                /// is not reached.
                static const double kMaxPredictionValue = 0.99999999999;
                pred = std::min(pred, kMaxPredictionValue);
                outcome ? trueWeights_.add(pred, weight) : falseWeights_.add(pred, weight);
            }

            /// Lazily initializes both histograms; no-op once initialized.
            void tryInit(int64_t bucketCount) {
                trueWeights_.tryInit(bucketCount);
                falseWeights_.tryInit(bucketCount);
            }

            /// Number of result values this accumulator will produce, driven by
            /// the largest used bucket of the true-outcome histogram.
            vector_size_t size() const {
                return trueWeights_.size();
            }

            /// Serializes both histograms back to back into `output`; returns
            /// total bytes written.
            size_t serialize(char *output) const {
                size_t bytes = trueWeights_.serialize(output);
                return bytes + falseWeights_.serialize(output + bytes);
            }

            size_t serializationSize() const {
                return trueWeights_.serializationSize() + falseWeights_.serializationSize();
            }

            /// Merges a serialized accumulator (two equal-size histogram payloads
            /// back to back, as written by serialize()) into this one.
            void mergeWith(StringView serialized) {
                auto input = serialized.data();
                // Both histograms have the same bucket count, so their payloads
                // are the same size and the buffer splits exactly in half.
                POLLUX_CHECK_EQ(serialized.size() % 2, 0);
                const size_t bufferSize = serialized.size() / 2;
                trueWeights_.mergeWith(input, bufferSize);
                falseWeights_.mergeWith(input + serialized.size() / 2, bufferSize);
            }

            /// Writes one metric value per bucket into `flatResult` starting at
            /// `offset`. Iterates buckets from the lowest prediction upward,
            /// treating each bucket's left edge as the classification threshold;
            /// running{True,False}Weight accumulate the weight of samples whose
            /// prediction fell below the current threshold. Stops once all
            /// positive weight has been consumed.
            void extractValues(FlatVector<double> *flatResult, vector_size_t offset) {
                const double totalTrueWeight = trueWeights_.totalWeights();
                const double totalFalseWeight = falseWeights_.totalWeights();

                double runningFalseWeight = 0;
                double runningTrueWeight = 0;
                int64_t trueWeightIndex = 0;
                while (trueWeightIndex < trueWeights_.bucketCount() &&
                       totalTrueWeight > runningTrueWeight) {
                    auto trueBucketResult = trueWeights_.getBucket(trueWeightIndex);
                    auto falseBucketResult = falseWeights_.getBucket(trueWeightIndex);

                    // Negatives with prediction >= threshold are false positives.
                    const double falsePositive = totalFalseWeight - runningFalseWeight;
                    const double negative = totalFalseWeight;

                    if constexpr (type == ClassificationType::kFallout) {
                        // Fallout (false-positive rate): FP / all negatives.
                        flatResult->set(offset + trueWeightIndex, falsePositive / negative);
                    } else if constexpr (type == ClassificationType::kPrecision) {
                        // Precision: TP / (TP + FP).
                        const double truePositive = (totalTrueWeight - runningTrueWeight);
                        const double totalPositives = truePositive + falsePositive;
                        flatResult->set(
                            offset + trueWeightIndex, truePositive / totalPositives);
                    } else if constexpr (type == ClassificationType::kRecall) {
                        // Recall (true-positive rate): TP / all positives.
                        const double truePositive = (totalTrueWeight - runningTrueWeight);
                        flatResult->set(
                            offset + trueWeightIndex, truePositive / totalTrueWeight);
                    } else if constexpr (type == ClassificationType::kMissRate) {
                        // Miss rate: positives below threshold / all positives.
                        flatResult->set(
                            offset + trueWeightIndex, runningTrueWeight / totalTrueWeight);
                    } else if constexpr (type == ClassificationType::kThresholds) {
                        // The threshold itself: the bucket's left edge.
                        flatResult->set(offset + trueWeightIndex, trueBucketResult.left);
                    } else {
                        POLLUX_UNREACHABLE("Not expected to be called.");
                    }

                    runningTrueWeight += trueBucketResult.weight;
                    runningFalseWeight += falseBucketResult.weight;
                    trueWeightIndex += 1;
                }
            }

        private:
            FixedDoubleHistogram trueWeights_;
            FixedDoubleHistogram falseWeights_;
        };

        /// exec::Aggregate implementation shared by all classification metric
        /// aggregates. Raw input: (buckets bigint, outcome boolean,
        /// pred double [, weight double]). Intermediate state: varbinary
        /// holding the serialized true/false histograms. Final result:
        /// array(double) with one metric value per used histogram bucket.
        template<ClassificationType type>
        class ClassificationAggregation : public exec::Aggregate {
        public:
            /// @param resultType Result type of the aggregation.
            /// @param useDefaultWeight True when the optional weight argument is
            /// absent; every row then contributes a weight of 1.0.
            explicit ClassificationAggregation(
                TypePtr resultType,
                bool useDefaultWeight = false)
                : Aggregate(std::move(resultType)), useDefaultWeight_(useDefaultWeight) {
            }

            int32_t accumulatorFixedWidthSize() const override {
                return sizeof(Accumulator<type>);
            }

            /// Accumulators own variable-size histogram storage.
            bool isFixedSize() const override {
                return false;
            }

            /// Accumulates raw input rows into a single group. Rows with a null
            /// in any required argument are skipped.
            void addSingleGroupRawInput(
                char *group,
                const SelectivityVector &rows,
                const std::vector<VectorPtr> &args,
                bool /*mayPushdown*/) override {
                decodeArguments(rows, args);
                auto accumulator = value<Accumulator<type> >(group);

                auto tracker = trackRowSize(group);
                rows.applyToSelected([&](auto row) {
                    if (decodedBuckets_.is_null_at(row) || decodedOutcome_.is_null_at(row) ||
                        decodedPred_.is_null_at(row) ||
                        (!useDefaultWeight_ && decodedWeight_.is_null_at(row))) {
                        return;
                    }
                    clearNull(group);
                    accumulator->tryInit(decodedBuckets_.value_at<int64_t>(row));
                    accumulator->setWeights(
                        decodedBuckets_.value_at<int64_t>(row),
                        decodedOutcome_.value_at<bool>(row),
                        decodedPred_.value_at<double>(row),
                        useDefaultWeight_ ? 1.0 : decodedWeight_.value_at<double>(row));
                });
            }

            /// Accumulates raw input rows, one group per row. Rows with a null
            /// in any required argument are skipped.
            void addRawInput(
                char **groups,
                const SelectivityVector &rows,
                const std::vector<VectorPtr> &args,
                bool /*mayPushdown*/) override {
                decodeArguments(rows, args);

                rows.applyToSelected([&](vector_size_t row) {
                    if (decodedBuckets_.is_null_at(row) || decodedOutcome_.is_null_at(row) ||
                        decodedPred_.is_null_at(row) ||
                        (!useDefaultWeight_ && decodedWeight_.is_null_at(row))) {
                        return;
                    }

                    auto &group = groups[row];
                    auto tracker = trackRowSize(group);

                    clearNull(group);
                    auto *accumulator = value<Accumulator<type> >(group);
                    accumulator->tryInit(decodedBuckets_.value_at<int64_t>(row));

                    accumulator->setWeights(
                        decodedBuckets_.value_at<int64_t>(row),
                        decodedOutcome_.value_at<bool>(row),
                        decodedPred_.value_at<double>(row),
                        useDefaultWeight_ ? 1.0 : decodedWeight_.value_at<double>(row));
                });
            }

            /// Serializes each group's accumulator into a varbinary row of the
            /// intermediate result. Null groups produce null rows.
            void extractAccumulators(char **groups, int32_t numGroups, VectorPtr *result)
            override {
                POLLUX_CHECK(result);
                auto flatResult = (*result)->as_flat_vector<StringView>();
                flatResult->resize(numGroups);

                uint64_t *rawNulls = nullptr;
                if (flatResult->may_have_nulls()) {
                    BufferPtr &nulls = flatResult->mutable_nulls(flatResult->size());
                    rawNulls = nulls->asMutable<uint64_t>();
                }

                for (auto i = 0; i < numGroups; ++i) {
                    auto group = groups[i];
                    if (isNull(group)) {
                        flatResult->set_null(i, true);
                        continue;
                    }

                    // Mark the row non-null before writing its payload.
                    if (rawNulls) {
                        bits::clearBit(rawNulls, i);
                    }
                    auto accumulator = value<Accumulator<type> >(group);
                    auto serializationSize = accumulator->serializationSize();
                    char *rawBuffer =
                            flatResult->getRawStringBufferWithSpace(serializationSize);

                    POLLUX_CHECK_EQ(accumulator->serialize(rawBuffer), serializationSize);
                    auto sv = StringView(rawBuffer, serializationSize);
                    flatResult->set(i, std::move(sv));
                }
            }

            /// Produces the final array(double) result, one array per group.
            void extractValues(char **groups, int32_t numGroups, VectorPtr *result)
            override {
                auto vector = (*result)->as<ArrayVector>();
                POLLUX_CHECK(vector);
                vector->resize(numGroups);

                vector_size_t numValues = 0;
                uint64_t *rawNulls = getRawNulls(result->get());

                // First pass: mark null rows and compute the total number of
                // elements so the elements vector can be sized exactly once.
                for (auto i = 0; i < numGroups; ++i) {
                    auto *group = groups[i];
                    if (isNull(group)) {
                        // Bug fix: a null group must surface as a null result
                        // row. Previously clearNull() was called here, which
                        // marked the row non-null while leaving its offset and
                        // size uninitialized (they are only written for
                        // non-null groups in the second pass).
                        vector->set_null(i, true);
                        continue;
                    }

                    clearNull(rawNulls, i);
                    numValues += value<Accumulator<type> >(group)->size();
                }

                auto flatResults = vector->elements()->as_flat_vector<double>();
                flatResults->resize(numValues);

                auto *rawOffsets = vector->offsets()->asMutable<vector_size_t>();
                auto *rawSizes = vector->sizes()->asMutable<vector_size_t>();

                // Second pass: write each non-null group's values and record
                // its slice of the elements vector.
                vector_size_t offset = 0;
                for (auto i = 0; i < numGroups; ++i) {
                    auto *group = groups[i];

                    if (isNull(group)) {
                        continue;
                    }
                    auto *accumulator = value<Accumulator<type> >(group);
                    const vector_size_t size = accumulator->size();

                    rawOffsets[i] = offset;
                    rawSizes[i] = size;

                    accumulator->extractValues(flatResults, offset);

                    offset += size;
                }
            }

            /// Merges serialized intermediate results into per-row groups.
            void addIntermediateResults(
                char **groups,
                const SelectivityVector &rows,
                const std::vector<VectorPtr> &args,
                bool /*mayPushdown*/) override {
                POLLUX_CHECK_EQ(args.size(), 1);
                decodedAcc_.decode(*args[0], rows);

                rows.applyToSelected([&](auto row) {
                    if (decodedAcc_.is_null_at(row)) {
                        return;
                    }

                    auto group = groups[row];
                    auto tracker = trackRowSize(group);
                    clearNull(group);

                    auto serialized = decodedAcc_.value_at<StringView>(row);

                    auto accumulator = value<Accumulator<type> >(group);
                    accumulator->mergeWith(serialized);
                });
            }

            /// Merges serialized intermediate results into a single group.
            void addSingleGroupIntermediateResults(
                char *group,
                const SelectivityVector &rows,
                const std::vector<VectorPtr> &args,
                bool /* mayPushdown */) override {
                POLLUX_CHECK_EQ(args.size(), 1);
                decodedAcc_.decode(*args[0], rows);
                auto tracker = trackRowSize(group);

                rows.applyToSelected([&](auto row) {
                    if (decodedAcc_.is_null_at(row)) {
                        return;
                    }

                    clearNull(group);

                    auto serialized = decodedAcc_.value_at<StringView>(row);

                    auto accumulator = value<Accumulator<type> >(group);
                    accumulator->mergeWith(serialized);
                });
            }

        protected:
            /// Placement-constructs an Accumulator in each new group and marks
            /// all groups null until they receive input.
            void initializeNewGroupsInternal(
                char **groups,
                melon::Range<const vector_size_t *> indices) override {
                exec::Aggregate::setAllNulls(groups, indices);
                for (auto i: indices) {
                    auto group = groups[i];
                    new(group + offset_) Accumulator<type>(allocator_);
                }
            }

            void destroyInternal(melon::Range<char **> groups) override {
                destroyAccumulators<Accumulator<type> >(groups);
            }

        private:
            /// Decodes the raw-input argument vectors; the weight column is only
            /// decoded when it was supplied.
            void decodeArguments(
                const SelectivityVector &rows,
                const std::vector<VectorPtr> &args) {
                decodedBuckets_.decode(*args[0], rows, true);
                decodedOutcome_.decode(*args[1], rows, true);
                decodedPred_.decode(*args[2], rows, true);
                if (!useDefaultWeight_) {
                    decodedWeight_.decode(*args[3], rows, true);
                }
            }

            DecodedVector decodedAcc_;
            DecodedVector decodedBuckets_;
            DecodedVector decodedOutcome_;
            DecodedVector decodedPred_;
            DecodedVector decodedWeight_;
            // True when the weight argument is absent and defaults to 1.0.
            const bool useDefaultWeight_{false};
        };

        /// Registers one classification aggregate under `name` with the given
        /// signatures. The factory picks the weighted or default-weight
        /// variant based on the number of arguments at the call site.
        template<ClassificationType T>
        void registerAggregateFunctionImpl(
            const std::string &name,
            bool withCompanionFunctions,
            bool overwrite,
            const std::vector<std::shared_ptr<exec::AggregateFunctionSignature> > &
            signatures) {
            auto factory = [](core::AggregationNode::Step,
                              const std::vector<TypePtr> &args,
                              const TypePtr &resultType,
                              const core::QueryConfig & /*config*/)
                    -> std::unique_ptr<exec::Aggregate> {
                // Four arguments means an explicit weight column was supplied;
                // otherwise every row gets the default weight of 1.0.
                const bool useDefaultWeight = args.size() != 4;
                return std::make_unique<ClassificationAggregation<T> >(
                    resultType, useDefaultWeight);
            };
            exec::registerAggregateFunction(
                name, signatures, factory, withCompanionFunctions, overwrite);
        }
    } // namespace

    void registerClassificationFunctions(
        const std::string &prefix,
        bool withCompanionFunctions,
        bool overwrite) {
        const auto signatures =
                std::vector<std::shared_ptr<exec::AggregateFunctionSignature> >{
                    exec::AggregateFunctionSignatureBuilder()
                    .returnType("array(double)")
                    .intermediateType("varbinary")
                    .argumentType("bigint")
                    .argumentType("boolean")
                    .argumentType("double")
                    .build(),
                    exec::AggregateFunctionSignatureBuilder()
                    .returnType("array(double)")
                    .intermediateType("varbinary")
                    .argumentType("bigint")
                    .argumentType("boolean")
                    .argumentType("double")
                    .argumentType("double")
                    .build()
                };
        registerAggregateFunctionImpl<ClassificationType::kFallout>(
            prefix + kClassificationFallout,
            withCompanionFunctions,
            overwrite,
            signatures);
        registerAggregateFunctionImpl<ClassificationType::kPrecision>(
            prefix + kClassificationPrecision,
            withCompanionFunctions,
            overwrite,
            signatures);
        registerAggregateFunctionImpl<ClassificationType::kRecall>(
            prefix + kClassificationRecall,
            withCompanionFunctions,
            overwrite,
            signatures);
        registerAggregateFunctionImpl<ClassificationType::kMissRate>(
            prefix + kClassificationMissRate,
            withCompanionFunctions,
            overwrite,
            signatures);
        registerAggregateFunctionImpl<ClassificationType::kThresholds>(
            prefix + kClassificationThreshold,
            withCompanionFunctions,
            overwrite,
            signatures);
    }
} // namespace kumo::pollux::aggregate::prestosql
