// Copyright (C) Kumo inc. and its affiliates.
// Author: Jeff.li lijippy@163.com
// All rights reserved.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//
#include <pollux/serializers/presto_vector_lexer.h>

#include <pollux/serializers/presto_header.h>
#include <pollux/serializers/presto_serializer_deserialization_utils.h>
#include <pollux/serializers/presto_serializer_serialization_utils.h>

namespace kumo::pollux::serializer::presto::detail {
    // Tokenizes the whole serialized page: header, column count, then one
    // column per count. Fails unless every input byte was consumed. On
    // success the accumulated tokens are moved into `out`. Rvalue-qualified:
    // the lexer is consumed by this call.
    Status PrestoVectorLexer::lex(std::vector<Token> &out) && {
        POLLUX_RETURN_NOT_OK(lexHeader());

        // The page declares how many top-level columns follow.
        int32_t columnCount = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_COLUMNS, &columnCount));

        for (int32_t i = 0; i < columnCount; ++i) {
            POLLUX_RETURN_NOT_OK(lexColumn());
        }

        // Trailing bytes mean the page was malformed.
        POLLUX_RETURN_IF(
            !source_.empty(), Status::Invalid("Source not fully consumed"));

        out = std::move(tokens_);
        return Status::OK();
    }

    // Lexes and validates the PrestoPage header, emitting a single HEADER
    // token on success. Only plain pages are accepted: compression and
    // encryption are rejected, and the declared uncompressed size must equal
    // both the compressed size and the number of bytes remaining in source_.
    Status PrestoVectorLexer::lexHeader() {
        assertCommitted();

        const auto header = PrestoHeader::read(&source_);
        POLLUX_RETURN_IF(
            !header.has_value(), Status::Invalid("PrestoPage header invalid"));
        // The lexer walks raw bytes, so it cannot tokenize transformed
        // (compressed or encrypted) payloads.
        POLLUX_RETURN_IF(
            isCompressedBitSet(header->pageCodecMarker),
            Status::Invalid("Compression is not supported"));
        POLLUX_RETURN_IF(
            isEncryptedBitSet(header->pageCodecMarker),
            Status::Invalid("Encryption is not supported"));
        // For an uncompressed page the two declared sizes must agree.
        POLLUX_RETURN_IF(
            header->uncompressedSize != header->compressedSize,
            Status::Invalid(
                "Compressed size must match uncompressed size: {} != {}",
                header->uncompressedSize,
                header->compressedSize));
        // The declared payload size must match what actually follows the
        // header, otherwise later length fields cannot be trusted.
        POLLUX_RETURN_IF(
            header->uncompressedSize != source_.size(),
            Status::Invalid(
                "Uncompressed size does not match content size: {} != {}",
                header->uncompressedSize,
                source_.size()));

        commit(TokenType::HEADER);

        return Status::OK();
    }

    // Lexes a length-prefixed column-encoding name into `out`. The length is
    // range-checked before any allocation happens; `out` is only written on
    // success.
    Status PrestoVectorLexer::lexColumEncoding(std::string &out) {
        assertCommitted();
        // readLengthPrefixedString is deliberately not used: it does not
        // validate the length field before allocating.
        int32_t length = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::COLUMN_ENCODING, &length));
        // Cap the length so a corrupt prefix cannot trigger a huge allocation.
        POLLUX_RETURN_IF(
            length < 0 || length > 100,
            Status::Invalid("Invalid column encoding length: {}", length));

        std::string name(length, '\0');
        POLLUX_RETURN_NOT_OK(
            lexBytes(length, TokenType::COLUMN_ENCODING, name.data()));

        out = std::move(name);

        return Status::OK();
    }

    // Lexes a single column: reads its encoding name, then dispatches to the
    // lexer for that encoding. Unknown encodings fail with Invalid. This is
    // mutually recursive with the container lexers (ARRAY/MAP/ROW/DICTIONARY/
    // RLE all lex nested columns via this function).
    Status PrestoVectorLexer::lexColumn() {
        std::string encoding;
        POLLUX_RETURN_NOT_OK(lexColumEncoding(encoding));

        if (encoding == kByteArray) {
            POLLUX_RETURN_NOT_OK(lexFixedArray<int8_t>(TokenType::BYTE_ARRAY));
        } else if (encoding == kShortArray) {
            POLLUX_RETURN_NOT_OK(lexFixedArray<int16_t>(TokenType::SHORT_ARRAY));
        } else if (encoding == kIntArray) {
            POLLUX_RETURN_NOT_OK(lexFixedArray<int32_t>(TokenType::INT_ARRAY));
        } else if (encoding == kLongArray) {
            POLLUX_RETURN_NOT_OK(lexFixedArray<int64_t>(TokenType::LONG_ARRAY));
        } else if (encoding == kInt128Array) {
            POLLUX_RETURN_NOT_OK(lexFixedArray<int128_t>(TokenType::INT128_ARRAY));
        } else if (encoding == kVariableWidth) {
            POLLUX_RETURN_NOT_OK(lexVariableWidth());
        } else if (encoding == kArray) {
            POLLUX_RETURN_NOT_OK(lexArray());
        } else if (encoding == kMap) {
            POLLUX_RETURN_NOT_OK(lexMap());
        } else if (encoding == kRow) {
            POLLUX_RETURN_NOT_OK(lexRow());
        } else if (encoding == kDictionary) {
            POLLUX_RETURN_NOT_OK(lexDictionary());
        } else if (encoding == kRLE) {
            POLLUX_RETURN_NOT_OK(lexRLE());
        } else {
            return Status::Invalid("Unknown encoding: {}", encoding);
        }

        return Status::OK();
    }

    // Lexes a VARIABLE_WIDTH column: row count, per-row offsets, nulls, then
    // a length-prefixed data blob.
    Status PrestoVectorLexer::lexVariableWidth() {
        int32_t numRows = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_ROWS, &numRows));
        // Validate before computing numRows * sizeof(int32_t): the count is
        // untrusted input, and a negative or very large value would wrap when
        // narrowed into lexBytes' int32_t parameter (e.g. 2^30 rows would
        // truncate to 0 offset bytes and be silently accepted).
        constexpr int32_t kMaxNumRows =
            std::numeric_limits<int32_t>::max() / int32_t(sizeof(int32_t));
        POLLUX_RETURN_IF(
            numRows < 0 || numRows > kMaxNumRows,
            Status::Invalid("Invalid num rows: {}", numRows));
        const int32_t numOffsetBytes = numRows * int32_t(sizeof(int32_t));
        POLLUX_RETURN_NOT_OK(lexBytes(numOffsetBytes, TokenType::OFFSETS));
        POLLUX_RETURN_NOT_OK(lexNulls(numRows));
        // Data blob: lexBytes itself rejects a negative or oversized size.
        int32_t dataBytes = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::VARIABLE_WIDTH_DATA_SIZE, &dataBytes));
        POLLUX_RETURN_NOT_OK(lexBytes(dataBytes, TokenType::VARIABLE_WIDTH_DATA));
        return Status::OK();
    }

    // Lexes an ARRAY column: the nested elements column first, then the row
    // count, (numRows + 1) offsets, and nulls.
    Status PrestoVectorLexer::lexArray() {
        POLLUX_RETURN_NOT_OK(lexColumn());
        int32_t numRows = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_ROWS, &numRows));
        // Guard (numRows + 1) * sizeof(int32_t): numRows is untrusted, and
        // numRows == INT32_MAX would make the addition signed-overflow (UB),
        // while negative or very large values would wrap when narrowed into
        // lexBytes' int32_t parameter.
        constexpr int32_t kMaxNumRows =
            std::numeric_limits<int32_t>::max() / int32_t(sizeof(int32_t)) - 1;
        POLLUX_RETURN_IF(
            numRows < 0 || numRows > kMaxNumRows,
            Status::Invalid("Invalid num rows: {}", numRows));
        const int32_t offsetBytes = (numRows + 1) * int32_t(sizeof(int32_t));
        POLLUX_RETURN_NOT_OK(lexBytes(offsetBytes, TokenType::OFFSETS));
        POLLUX_RETURN_NOT_OK(lexNulls(numRows));
        return Status::OK();
    }

    // Lexes a MAP column: key column, value column, an optional serialized
    // hash table, then the row count, (numRows + 1) offsets, and nulls.
    Status PrestoVectorLexer::lexMap() {
        // Key column
        POLLUX_RETURN_NOT_OK(lexColumn());
        // Value column
        POLLUX_RETURN_NOT_OK(lexColumn());
        int32_t hashTableBytes = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::HASH_TABLE_SIZE, &hashTableBytes));
        // A size of -1 means no hash table was serialized. Other negative
        // sizes are rejected inside lexBytes.
        if (hashTableBytes != -1) {
            POLLUX_RETURN_NOT_OK(lexBytes(hashTableBytes, TokenType::HASH_TABLE));
        }
        int32_t numRows = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_ROWS, &numRows));
        // Guard (numRows + 1) * sizeof(int32_t): numRows is untrusted, and
        // numRows == INT32_MAX would make the addition signed-overflow (UB),
        // while negative or very large values would wrap when narrowed into
        // lexBytes' int32_t parameter.
        constexpr int32_t kMaxNumRows =
            std::numeric_limits<int32_t>::max() / int32_t(sizeof(int32_t)) - 1;
        POLLUX_RETURN_IF(
            numRows < 0 || numRows > kMaxNumRows,
            Status::Invalid("Invalid num rows: {}", numRows));
        const int32_t offsetBytes = (numRows + 1) * int32_t(sizeof(int32_t));
        POLLUX_RETURN_NOT_OK(lexBytes(offsetBytes, TokenType::OFFSETS));
        POLLUX_RETURN_NOT_OK(lexNulls(numRows));
        return Status::OK();
    }

    // Lexes a ROW column: a field count, one nested column per field, then
    // the row count, (numRows + 1) offsets, and nulls.
    Status PrestoVectorLexer::lexRow() {
        int32_t numFields = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_FIELDS, &numFields));
        for (int32_t field = 0; field < numFields; ++field) {
            POLLUX_RETURN_NOT_OK(lexColumn());
        }
        int32_t numRows = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_ROWS, &numRows));
        // Guard (numRows + 1) * sizeof(int32_t): numRows is untrusted, and
        // numRows == INT32_MAX would make the addition signed-overflow (UB),
        // while negative or very large values would wrap when narrowed into
        // lexBytes' int32_t parameter.
        constexpr int32_t kMaxNumRows =
            std::numeric_limits<int32_t>::max() / int32_t(sizeof(int32_t)) - 1;
        POLLUX_RETURN_IF(
            numRows < 0 || numRows > kMaxNumRows,
            Status::Invalid("Invalid num rows: {}", numRows));
        const int32_t offsetBytes = (numRows + 1) * int32_t(sizeof(int32_t));
        POLLUX_RETURN_NOT_OK(lexBytes(offsetBytes, TokenType::OFFSETS));
        POLLUX_RETURN_NOT_OK(lexNulls(numRows));

        return Status::OK();
    }

    // Lexes a DICTIONARY column: row count, the nested dictionary (values)
    // column, one int32 index per row, then a 24-byte dictionary ID.
    Status PrestoVectorLexer::lexDictionary() {
        int32_t numRows{0};
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NUM_ROWS, &numRows));
        // Guard numRows * sizeof(int32_t): the count is untrusted, and a
        // negative or very large value would wrap when narrowed into
        // lexBytes' int32_t parameter.
        constexpr int32_t kMaxNumRows =
            std::numeric_limits<int32_t>::max() / int32_t(sizeof(int32_t));
        POLLUX_RETURN_IF(
            numRows < 0 || numRows > kMaxNumRows,
            Status::Invalid("Invalid num rows: {}", numRows));
        // Dictionary column
        POLLUX_RETURN_NOT_OK(lexColumn());
        const int32_t indicesBytes = numRows * int32_t(sizeof(int32_t));
        POLLUX_RETURN_NOT_OK(lexBytes(indicesBytes, TokenType::DICTIONARY_INDICES));
        // Dictionary ID (fixed 24 bytes)
        POLLUX_RETURN_NOT_OK(lexBytes(24, TokenType::DICTIONARY_ID));
        return Status::OK();
    }

    // Lexes an RLE column: a row count (validated and consumed, but not
    // stored — the no-output lexInt overload is used) followed by a single
    // nested length-one column holding the repeated value.
    Status PrestoVectorLexer::lexRLE() {
        // Num rows
        POLLUX_RETURN_NOT_OK(lexInt<int32_t>(TokenType::NUM_ROWS));
        // RLE length one column
        POLLUX_RETURN_NOT_OK(lexColumn());
        return Status::OK();
    }

    // Lexes the optional nulls section of a column. `numRows` is in-out: on
    // return it has been decremented by the number of null entries, so the
    // caller is left with the count of non-null rows.
    Status PrestoVectorLexer::lexNulls(int32_t &numRows) {
        assertCommitted();
        POLLUX_RETURN_IF(
            numRows < 0, Status::Invalid("Negative num rows: {}", numRows));

        // One-byte flag: non-zero means a null bitmap follows.
        int8_t hasNulls = 0;
        POLLUX_RETURN_NOT_OK(lexInt(TokenType::NULLS, &hasNulls));
        if (hasNulls != 0) {
            // Bytes needed for a one-bit-per-row bitmap.
            const auto numBytes = bits::nbytes(numRows);
            POLLUX_RETURN_IF(
                numBytes > source_.size(),
                Status::Invalid(
                    "More rows than bytes in source: {} > {}",
                    numRows,
                    source_.size()));
            // Grow the reusable scratch buffer (sized in buffer elements,
            // hence the round-up) to hold the bitmap.
            if (nullsBuffer_.size() < numBytes) {
                constexpr auto eltBytes = sizeof(nullsBuffer_[0]);
                nullsBuffer_.resize(bits::roundUp(numBytes, eltBytes) / eltBytes);
            }
            auto *nulls = nullsBuffer_.data();
            POLLUX_RETURN_NOT_OK(
                lexBytes(numBytes, TokenType::NULLS, reinterpret_cast<char*>(nulls)));

            // NOTE(review): bit order is reversed before counting —
            // presumably the wire format stores the flags MSB-first within
            // each byte; confirm against the serializer.
            bits::reverseBits(reinterpret_cast<uint8_t *>(nulls), numBytes);
            const auto numNulls = bits::countBits(nulls, 0, numRows);

            // Leave the caller with the non-null row count.
            numRows -= numNulls;
        }
        commit(TokenType::NULLS);
        return Status::OK();
    }

    // Consumes `numBytes` bytes from source_, optionally copying them into
    // `dst` (when non-null), and commits the consumed span as a token of
    // `tokenType`. Rejects negative counts and reads past the end of input.
    // Note: the two checks must stay in this order — a negative numBytes
    // would also compare greater than source_.size() after the implicit
    // signed-to-unsigned conversion, yielding the wrong error message.
    Status
    PrestoVectorLexer::lexBytes(int32_t numBytes, TokenType tokenType, char *dst) {
        assertCommitted();
        POLLUX_RETURN_IF(
            numBytes < 0,
            Status::Invalid("Attempting to read negative numBytes: {}", numBytes));
        POLLUX_RETURN_IF(
            numBytes > source_.size(),
            Status::Invalid(
                "Attempting to read more bytes than in source: {} > {}",
                numBytes,
                source_.size()));
        // dst == nullptr means "skip": consume the bytes without copying.
        if (dst != nullptr) {
            std::copy(source_.begin(), source_.begin() + numBytes, dst);
        }
        source_.remove_prefix(numBytes);
        commit(tokenType);
        return Status::OK();
    }

    // Records every byte consumed since the previous commit as a token of
    // `tokenType`. Adjacent tokens of the same type are merged into one;
    // committing with nothing consumed only re-syncs the committed pointer.
    void PrestoVectorLexer::commit(TokenType tokenType) {
        const auto cursor = source_.begin();
        assert(committedPtr_ <= cursor);
        // Token lengths are 32-bit; the consumed span must fit.
        assert(
            int64_t(cursor - committedPtr_) <=
            int64_t(std::numeric_limits<uint32_t>::max()));
        if (cursor != committedPtr_) {
            const auto consumed = uint32_t(cursor - committedPtr_);
            if (tokens_.empty() || tokens_.back().tokenType != tokenType) {
                // Start a fresh token for this span.
                Token tok;
                tok.tokenType = tokenType;
                tok.length = consumed;
                tokens_.push_back(tok);
            } else {
                // Same type as the previous token: extend it instead.
                tokens_.back().length += consumed;
            }
        }
        committedPtr_ = cursor;
    }
} // namespace kumo::pollux::serializer::presto::detail
