// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#include "data_type_jsonb_serde.h"

#include <rapidjson/document.h>
#include <rapidjson/stringbuffer.h>
#include <rapidjson/writer.h>

#include <cstddef>
#include <cstdint>
#include <memory>

#include "arrow/array/builder_binary.h"
#include "common/exception.h"
#include "common/status.h"
#include "exprs/json_functions.h"
#include "runtime/jsonb_value.h"
#include "util/jsonb_parser_simd.h"
namespace doris {
namespace vectorized {
#include "common/compile_check_begin.h"

Status DataTypeJsonbSerDe::write_column_to_mysql_binary(const IColumn& column,
                                                        MysqlRowBinaryBuffer& result,
                                                        int64_t row_idx, bool col_const) const {
    // Resolve the physical row (constant columns always read row 0), then fetch
    // the raw jsonb payload bytes for that row.
    const auto& string_col = assert_cast<const ColumnString&>(column);
    const auto value = string_col.get_data_at(index_check_const(row_idx, col_const));
    // An absent or zero-length jsonb payload encodes SQL NULL.
    if (value.data != nullptr && value.size != 0) {
        // Decode the binary jsonb into its textual json form for the wire.
        const std::string text = JsonbToJson::jsonb_to_json_string(value.data, value.size);
        if (UNLIKELY(0 != result.push_string(text.c_str(), text.size()))) {
            return Status::InternalError("pack mysql buffer failed.");
        }
    } else {
        if (UNLIKELY(0 != result.push_null())) {
            return Status::InternalError("pack mysql buffer failed.");
        }
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::serialize_column_to_json(const IColumn& column, int64_t start_idx,
                                                    int64_t end_idx, BufferWritable& bw,
                                                    FormatOptions& options) const {
    // Shared serde macro: presumably iterates rows in [start_idx, end_idx) and
    // delegates each one to serialize_one_cell_to_json — see the macro
    // definition in the serde headers for the exact expansion.
    SERIALIZE_COLUMN_TO_JSON();
}

Status DataTypeJsonbSerDe::serialize_one_cell_to_json(const IColumn& column, int64_t row_num,
                                                      BufferWritable& bw,
                                                      FormatOptions& options) const {
    // Unwrap a possible ColumnConst and normalize row_num to the nested column.
    auto result = check_column_const_set_readability(column, row_num);
    ColumnPtr ptr = result.first;
    row_num = result.second;

    const StringRef& s = assert_cast<const ColumnString&>(*ptr).get_data_at(row_num);
    if (s.size > 0) {
        // Decode the binary jsonb payload into its textual json form.
        std::string str = JsonbToJson::jsonb_to_json_string(s.data, s.size);
        bw.write(str.c_str(), str.size());
    } else {
        // Empty payload encodes NULL; emit the CSV null marker. Use size()
        // instead of strlen(c_str()): same result for this NUL-free constant,
        // without an O(n) rescan of the string.
        bw.write(NULL_IN_CSV_FOR_ORDINARY_TYPE.c_str(), NULL_IN_CSV_FOR_ORDINARY_TYPE.size());
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::deserialize_column_from_json_vector(IColumn& column,
                                                               std::vector<Slice>& slices,
                                                               uint64_t* num_deserialized,
                                                               const FormatOptions& options) const {
    // Shared serde macro: presumably parses each slice via
    // deserialize_one_cell_from_json and reports the number of successfully
    // parsed rows through num_deserialized — see the macro definition in the
    // serde headers for the exact expansion.
    DESERIALIZE_COLUMN_FROM_JSON_VECTOR();
    return Status::OK();
}

Status DataTypeJsonbSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice,
                                                          const FormatOptions& options) const {
    // Parse the textual json into its binary jsonb representation; a malformed
    // document surfaces as a non-OK status from the parser.
    JsonBinaryValue parsed;
    RETURN_IF_ERROR(parsed.from_json_string(slice.data, slice.size));
    // Store the binary jsonb bytes directly into the string column.
    assert_cast<ColumnString&>(column).insert_data(parsed.value(), parsed.size());
    return Status::OK();
}

Status DataTypeJsonbSerDe::write_column_to_arrow(const IColumn& column, const NullMap* null_map,
                                                 arrow::ArrayBuilder* array_builder, int64_t start,
                                                 int64_t end, const cctz::time_zone& ctz) const {
    // Appends rows [start, end) to an arrow StringBuilder, decoding each binary
    // jsonb payload to textual json. Rows flagged in null_map become arrow nulls.
    const auto& string_column = assert_cast<const ColumnString&>(column);
    auto& builder = assert_cast<arrow::StringBuilder&>(*array_builder);
    // Use a signed index: `start`/`end` are int64_t, and a size_t counter would
    // mix signed and unsigned in the comparison (the sibling serde loops use
    // the signed bound type for the same reason).
    for (int64_t string_i = start; string_i < end; ++string_i) {
        if (null_map && (*null_map)[string_i]) {
            RETURN_IF_ERROR(checkArrowStatus(builder.AppendNull(), column.get_name(),
                                             array_builder->type()->name()));
            continue;
        }
        std::string_view string_ref = string_column.get_data_at(string_i).to_string_view();
        std::string json_string =
                JsonbToJson::jsonb_to_json_string(string_ref.data(), string_ref.size());
        RETURN_IF_ERROR(
                checkArrowStatus(builder.Append(json_string.data(),
                                                cast_set<int, size_t, false>(json_string.size())),
                                 column.get_name(), array_builder->type()->name()));
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::read_column_from_arrow(IColumn& column, const arrow::Array* arrow_array,
                                                  int64_t start, int64_t end,
                                                  const cctz::time_zone& ctz) const {
    // Converts textual json stored in an arrow array into binary jsonb rows.
    // Rows [start, end) are appended to `column`; arrow nulls become default
    // (empty, i.e. NULL) cells.
    if (arrow_array->type_id() == arrow::Type::STRING ||
        arrow_array->type_id() == arrow::Type::BINARY) {
        const auto* concrete_array = dynamic_cast<const arrow::BinaryArray*>(arrow_array);
        std::shared_ptr<arrow::Buffer> buffer = concrete_array->value_data();

        // 32-bit offsets: entry i's bytes live in [offset[i], offset[i+1]).
        const uint8_t* offsets_data = concrete_array->value_offsets()->data();
        const size_t offset_size = sizeof(int32_t);

        JsonBinaryValue value;
        for (auto offset_i = start; offset_i < end; ++offset_i) {
            if (!concrete_array->IsNull(offset_i)) {
                int32_t start_offset = 0;
                int32_t end_offset = 0;
                // memcpy rather than a direct int32_t load: the raw offsets
                // buffer is not guaranteed to be suitably aligned.
                memcpy(&start_offset, offsets_data + offset_i * offset_size, offset_size);
                memcpy(&end_offset, offsets_data + (offset_i + 1) * offset_size, offset_size);

                int32_t length = end_offset - start_offset;
                const auto* raw_data = buffer->data() + start_offset;

                RETURN_IF_ERROR(
                        value.from_json_string(reinterpret_cast<const char*>(raw_data), length));
                column.insert_data(value.value(), value.size());
            } else {
                column.insert_default();
            }
        }
    } else if (arrow_array->type_id() == arrow::Type::FIXED_SIZE_BINARY) {
        const auto* concrete_array = dynamic_cast<const arrow::FixedSizeBinaryArray*>(arrow_array);
        uint32_t width = concrete_array->byte_width();
        // GetValue(start) points at the first requested row; following rows sit
        // at fixed-width strides from it.
        const auto* array_data = concrete_array->GetValue(start);

        JsonBinaryValue value;
        // Signed index to match the signed `end - start` bound.
        for (int64_t offset_i = 0; offset_i < end - start; ++offset_i) {
            // BUGFIX: IsNull takes an ABSOLUTE row index, but offset_i is
            // relative to `start`; the previous code consulted the wrong row's
            // null bit whenever start != 0.
            if (!concrete_array->IsNull(start + offset_i)) {
                const auto* raw_data = array_data + (offset_i * width);

                RETURN_IF_ERROR(
                        value.from_json_string(reinterpret_cast<const char*>(raw_data), width));
                column.insert_data(value.value(), value.size());
            } else {
                column.insert_default();
            }
        }
    } else if (arrow_array->type_id() == arrow::Type::LARGE_STRING ||
               arrow_array->type_id() == arrow::Type::LARGE_BINARY) {
        const auto* concrete_array = dynamic_cast<const arrow::LargeBinaryArray*>(arrow_array);
        std::shared_ptr<arrow::Buffer> buffer = concrete_array->value_data();

        JsonBinaryValue value;
        for (auto offset_i = start; offset_i < end; ++offset_i) {
            if (!concrete_array->IsNull(offset_i)) {
                // Large arrays expose accessor helpers for 64-bit offsets.
                const auto* raw_data = buffer->data() + concrete_array->value_offset(offset_i);

                RETURN_IF_ERROR(value.from_json_string(reinterpret_cast<const char*>(raw_data),
                                                       concrete_array->value_length(offset_i)));
                column.insert_data(value.value(), value.size());
            } else {
                column.insert_default();
            }
        }
    } else {
        return Status::InvalidArgument("Unsupported arrow type for json column: {}",
                                       arrow_array->type_id());
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::write_column_to_orc(const std::string& timezone, const IColumn& column,
                                               const NullMap* null_map,
                                               orc::ColumnVectorBatch* orc_col_batch, int64_t start,
                                               int64_t end, vectorized::Arena& arena) const {
    // Writes rows [start, end) into an ORC StringVectorBatch. Null rows are
    // identified via the batch's own notNull flags (the null_map parameter is
    // unused here). All serialized strings are backed by one arena allocation
    // that must outlive the batch flush.
    auto* cur_batch = dynamic_cast<orc::StringVectorBatch*>(orc_col_batch);
    const auto& string_column = assert_cast<const ColumnString&>(column);
    // First pass: serialize every non-null row to textual json and sum up the
    // byte count so a single contiguous allocation can back all of them.
    std::vector<std::string> serialized_values;
    std::vector<int64_t> valid_row_indices;
    size_t total_size = 0;
    // Signed loop index: `start`/`end` are int64_t; a size_t counter would mix
    // signed and unsigned (and valid_row_indices stores the same signed type).
    for (int64_t row_id = start; row_id < end; row_id++) {
        if (cur_batch->notNull[row_id] == 1) {
            std::string_view string_ref = string_column.get_data_at(row_id).to_string_view();
            auto serialized_value =
                    JsonbToJson::jsonb_to_json_string(string_ref.data(), string_ref.size());
            serialized_values.push_back(std::move(serialized_value));
            total_size += serialized_values.back().length();
            valid_row_indices.push_back(row_id);
        }
    }
    // Allocate contiguous memory based on the calculated size.
    char* ptr = arena.alloc(total_size);
    if (!ptr) {
        return Status::InternalError(
                "malloc memory {} error when write variant column data to orc file.", total_size);
    }
    // Second pass: copy each serialized value into the arena and point the
    // corresponding batch entry at it.
    size_t offset = 0;
    for (size_t i = 0; i < serialized_values.size(); i++) {
        const auto& serialized_value = serialized_values[i];
        int64_t row_id = valid_row_indices[i];
        size_t len = serialized_value.length();
        // Defensive check: should be unreachable since total_size was computed
        // from these very strings.
        if (offset + len > total_size) {
            return Status::InternalError(
                    "Buffer overflow when writing column data to ORC file. offset {} with len {} "
                    "exceed total_size {} . ",
                    offset, len, total_size);
        }
        memcpy(ptr + offset, serialized_value.data(), len);
        cur_batch->data[row_id] = ptr + offset;
        cur_batch->length[row_id] = len;
        offset += len;
    }
    cur_batch->numElements = end - start;
    return Status::OK();
}

Status DataTypeJsonbSerDe::write_column_to_pb(const IColumn& column, PValues& result, int64_t start,
                                              int64_t end) const {
    // Serializes rows [start, end) into the protobuf result as textual json
    // strings, tagging the payload type as JSONB.
    const auto& string_column = assert_cast<const ColumnString&>(column);
    result.mutable_string_value()->Reserve(cast_set<int>(end - start));
    auto* ptype = result.mutable_type();
    ptype->set_id(PGenericType::JSONB);
    // Signed loop index: `start`/`end` are int64_t; a size_t counter would mix
    // signed and unsigned in the loop condition.
    for (int64_t row_num = start; row_num < end; ++row_num) {
        const auto& string_ref = string_column.get_data_at(row_num);
        if (string_ref.size > 0) {
            result.add_string_value(
                    JsonbToJson::jsonb_to_json_string(string_ref.data, string_ref.size));
        } else {
            // Empty payload encodes NULL; emit the CSV null marker text.
            result.add_string_value(NULL_IN_CSV_FOR_ORDINARY_TYPE);
        }
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::read_column_from_pb(IColumn& column, const PValues& arg) const {
    // Parse every textual json value in the protobuf into binary jsonb and
    // append it to the string column.
    auto& dst = assert_cast<ColumnString&>(column);
    const int count = arg.string_value_size();
    dst.reserve(dst.size() + count);
    JsonBinaryValue parsed;
    for (int idx = 0; idx < count; ++idx) {
        RETURN_IF_ERROR(parsed.from_json_string(arg.string_value(idx)));
        dst.insert_data(parsed.value(), parsed.size());
    }
    return Status::OK();
}

// Recursively converts a parsed JsonbValue tree into an equivalent
// rapidjson::Value. `allocator` owns memory for arrays/objects created along
// the way. NOTE(review): SetString/GenericStringRef without an allocator store
// references into the jsonb blob, so `val` must outlive any use of `target`.
void convert_jsonb_to_rapidjson(const JsonbValue& val, rapidjson::Value& target,
                                rapidjson::Document::AllocatorType& allocator) {
    // convert type of jsonb to rapidjson::Value
    switch (val.type) {
    case JsonbType::T_True:
        target.SetBool(true);
        break;
    case JsonbType::T_False:
        target.SetBool(false);
        break;
    case JsonbType::T_Null:
        target.SetNull();
        break;
    case JsonbType::T_Float:
        target.SetFloat(val.unpack<JsonbFloatVal>()->val());
        break;
    case JsonbType::T_Double:
        target.SetDouble(val.unpack<JsonbDoubleVal>()->val());
        break;
    case JsonbType::T_Int64:
        target.SetInt64(val.unpack<JsonbInt64Val>()->val());
        break;
    // All narrower integer widths widen losslessly into rapidjson's int.
    case JsonbType::T_Int32:
        target.SetInt(val.unpack<JsonbInt32Val>()->val());
        break;
    case JsonbType::T_Int16:
        target.SetInt(val.unpack<JsonbInt16Val>()->val());
        break;
    case JsonbType::T_Int8:
        target.SetInt(val.unpack<JsonbInt8Val>()->val());
        break;
    case JsonbType::T_String:
        target.SetString(val.unpack<JsonbStringVal>()->getBlob(),
                         val.unpack<JsonbStringVal>()->getBlobLen());
        break;
    case JsonbType::T_Array: {
        target.SetArray();
        const ArrayVal& array = *val.unpack<ArrayVal>();
        // An empty jsonb array is mapped to JSON null here — preserved
        // existing behavior; confirm this is intentional for consumers.
        if (array.numElem() == 0) {
            target.SetNull();
            break;
        }
        target.Reserve(array.numElem(), allocator);
        for (auto it = array.begin(); it != array.end(); ++it) {
            rapidjson::Value array_val;
            convert_jsonb_to_rapidjson(*static_cast<const JsonbValue*>(it), array_val, allocator);
            target.PushBack(array_val, allocator);
        }
        break;
    }
    case JsonbType::T_Object: {
        target.SetObject();
        const ObjectVal& obj = *val.unpack<ObjectVal>();
        for (auto it = obj.begin(); it != obj.end(); ++it) {
            rapidjson::Value obj_val;
            convert_jsonb_to_rapidjson(*it->value(), obj_val, allocator);
            target.AddMember(rapidjson::GenericStringRef(it->getKeyStr(), it->klen()), obj_val,
                             allocator);
        }
        break;
    }
    default:
        // Fixed typo in the crash message ("unkown" -> "unknown").
        CHECK(false) << "unknown type " << static_cast<int>(val.type);
        break;
    }
}

Status DataTypeJsonbSerDe::serialize_column_to_jsonb(const IColumn& from_column, int64_t row_num,
                                                     JsonbWriter& writer) const {
    // Validate that the stored bytes really form a jsonb document, then write
    // its root value through the supplied writer.
    const auto& col = assert_cast<const ColumnString&>(from_column);
    const auto raw = col.get_data_at(row_num);
    const JsonbDocument* doc = nullptr;
    RETURN_IF_ERROR(JsonbDocument::checkAndCreateDocument(raw.data, raw.size, &doc));

    if (!writer.writeValue(doc->getValue())) {
        return Status::InternalError(
                "writeValue failed in DataTypeJsonbSerDe::serialize_column_to_jsonb");
    }
    return Status::OK();
}

Status DataTypeJsonbSerDe::deserialize_column_from_jsonb(IColumn& column,
                                                         const JsonbValue* jsonb_value,
                                                         CastParameters& castParms) const {
    // Re-serialize the value through a writer to obtain a self-contained jsonb
    // document, then store its bytes in the string column.
    JsonbWriter writer;
    if (!writer.writeValue(jsonb_value)) {
        return Status::InternalError(
                "writeValue failed in DataTypeJsonbSerDe::deserialize_column_from_jsonb");
    }

    const auto* output = writer.getOutput();
    assert_cast<ColumnString&>(column).insert_data(output->getBuffer(), output->getSize());
    return Status::OK();
}
Status DataTypeJsonbSerDe::from_string(StringRef& str, IColumn& column,
                                       const FormatOptions& options) const {
    // A jsonb cell is parsed from text exactly like the json path does.
    Slice text = str.to_slice();
    return deserialize_one_cell_from_json(column, text, options);
}

void DataTypeJsonbSerDe::write_one_cell_to_binary(const IColumn& src_column,
                                                  ColumnString::Chars& chars,
                                                  int64_t row_num) const {
    // Appends one cell in the layout:
    //   [1-byte field-type tag][sizeof(size_t) payload length][payload bytes]
    const auto& src = assert_cast<const ColumnString&>(src_column);
    const auto payload = src.get_data_at(row_num);
    const uint8_t type_tag = static_cast<uint8_t>(FieldType::OLAP_FIELD_TYPE_JSONB);
    const size_t payload_len = payload.size;

    // Grow the buffer once, then fill it with a running write pointer.
    const size_t begin = chars.size();
    chars.resize(begin + sizeof(uint8_t) + sizeof(size_t) + payload_len);

    auto* out = chars.data() + begin;
    memcpy(out, &type_tag, sizeof(uint8_t));
    out += sizeof(uint8_t);
    memcpy(out, &payload_len, sizeof(size_t));
    out += sizeof(size_t);
    memcpy(out, payload.data, payload_len);
}

const uint8_t* DataTypeJsonbSerDe::deserialize_binary_to_column(const uint8_t* data,
                                                                IColumn& column) {
    auto& col = assert_cast<ColumnString&, TypeCheckOnRelease::DISABLE>(column);
    const size_t data_size = unaligned_load<size_t>(data);
    data += sizeof(size_t);
    col.insert_data(reinterpret_cast<const char*>(data), data_size);
    data += data_size;
    return data;
}

const uint8_t* DataTypeJsonbSerDe::deserialize_binary_to_field(const uint8_t* data, Field& field,
                                                               FieldInfo& info) {
    const size_t data_size = unaligned_load<size_t>(data);
    data += sizeof(size_t);
    field = Field::create_field<TYPE_JSONB>(
            JsonbField(reinterpret_cast<const char*>(data), data_size));
    data += data_size;
    return data;
}

void DataTypeJsonbSerDe::to_string(const IColumn& column, size_t row_num,
                                   BufferWritable& bw) const {
    const auto& col = assert_cast<const ColumnString&, TypeCheckOnRelease::DISABLE>(column);
    const auto ref = col.get_data_at(row_num);
    // Empty payload encodes NULL.
    if (ref.size == 0) {
        bw.write("NULL", 4);
        return;
    }
    // When nested inside a complex type, the json text is wrapped in quotes.
    const bool quoted = _nesting_level > 1;
    if (quoted) {
        bw.write('"');
    }
    std::string json = JsonbToJson::jsonb_to_json_string(ref.data, ref.size);
    bw.write(json.c_str(), json.size());
    if (quoted) {
        bw.write('"');
    }
}

bool DataTypeJsonbSerDe::write_column_to_presto_text(const IColumn& column, BufferWritable& bw,
                                                     int64_t row_idx) const {
    const auto& col = assert_cast<const ColumnString&, TypeCheckOnRelease::DISABLE>(column);
    const auto ref = col.get_data_at(row_idx);
    // Empty payload encodes NULL; otherwise emit the decoded json text.
    if (ref.size == 0) {
        bw.write("NULL", 4);
    } else {
        std::string json = JsonbToJson::jsonb_to_json_string(ref.data, ref.size);
        bw.write(json.c_str(), json.size());
    }
    return true;
}

} // namespace vectorized
} // namespace doris
