// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#include <nebula/c/bridge.h>

#include <algorithm>
#include <cerrno>
#include <cstring>
#include <memory>
#include <string>
#include <string_view>
#include <utility>
#include <vector>

#include <nebula/core/array.h>
#include <nebula/core/buffer.h>
#include <nebula/c/helpers.h>
#include <nebula/c/util_internal.h>
#include <nebula/core/extension_type.h>
#include <nebula/core/memory_pool.h>
#include <nebula/core/memory_pool_internal.h>  // for kZeroSizeArea
#include <nebula/core/record_batch.h>
#include <nebula/core/stl_allocator.h>
#include <nebula/types/type_traits.h>
#include <nebula/bits/bit_util.h>
#include <turbo/base/checked_cast.h>
#include <nebula/util/key_value_metadata.h>
#include <turbo/log/logging.h>
#include <nebula/util/range.h>
#include <turbo/container/small_vector.h>
#include <nebula/util/string.h>
#include <nebula/util/value_parsing.h>
#include <nebula/core/visit_type_inline.h>
#include <turbo/log/logging.h>

namespace nebula {
    using turbo::checked_cast;
    using turbo::checked_pointer_cast;

    using turbo::SmallVector;

    using internal::ArrayExportGuard;
    using internal::ArrayExportTraits;
    using internal::SchemaExportGuard;
    using internal::SchemaExportTraits;

    using internal::ToChars;

    using memory_pool::internal::kZeroSizeArea;

    namespace {
        // Common error returned for types with no C data interface representation.
        turbo::Status ExportingNotImplemented(const DataType &type) {
            const std::string type_repr = type.to_string();
            return turbo::unimplemented_error("Exporting ", type_repr, " array not supported");
        }

        // Allocate exported private data using MemoryPool,
        // to allow accounting memory and checking for memory leaks.

        // XXX use Gandiva's SimpleArena?

        // CRTP mixin routing `new`/`delete` of Derived through the default
        // MemoryPool, so exported private data shows up in pool accounting and
        // leak checks.
        template<typename Derived>
        struct PoolAllocationMixin {
            static void *operator new(size_t size) {
                // This mixin is only ever instantiated for exactly Derived.
                DKCHECK_EQ(size, sizeof(Derived));
                uint8_t *allocated;
                KCHECK(default_memory_pool()->allocate(static_cast<int64_t>(size), &allocated).ok());
                return allocated;
            }

            static void operator delete(void *ptr) {
                // Size is statically known, matching the allocation above.
                default_memory_pool()->free(static_cast<uint8_t *>(ptr), sizeof(Derived));
            }
        };

        //////////////////////////////////////////////////////////////////////////
        // C schema export

        // Owner of everything an exported ArrowSchema points at: the format, name
        // and encoded-metadata strings, plus the child and dictionary ArrowSchema
        // structs.  Allocated in Finish() and freed by ReleaseExportedSchema().
        struct ExportedSchemaPrivateData : PoolAllocationMixin<ExportedSchemaPrivateData> {
            std::string format_;    // backing storage for ArrowSchema::format
            std::string name_;      // backing storage for ArrowSchema::name
            std::string metadata_;  // binary-encoded metadata (empty => no metadata)
            struct ArrowSchema dictionary_;  // valid only when the schema has a dictionary
            SmallVector<struct ArrowSchema, 1> children_;
            // Pointer table handed out as ArrowSchema::children; points into children_.
            SmallVector<struct ArrowSchema *, 4> child_pointers_;

            ExportedSchemaPrivateData() = default;

            TURBO_DEFAULT_MOVE_AND_ASSIGN(ExportedSchemaPrivateData);

            TURBO_DISALLOW_COPY_AND_ASSIGN(ExportedSchemaPrivateData);
        };

        // Release callback installed on every exported ArrowSchema.  Recursively
        // releases children and dictionary, then frees the private data.
        void ReleaseExportedSchema(struct ArrowSchema *schema) {
            if (ArrowSchemaIsReleased(schema)) {
                return;
            }
            for (int64_t child_idx = 0; child_idx < schema->n_children; ++child_idx) {
                struct ArrowSchema *child_schema = schema->children[child_idx];
                ArrowSchemaRelease(child_schema);
                DKCHECK(ArrowSchemaIsReleased(child_schema))
        << "Child release callback should have marked it released";
            }
            if (struct ArrowSchema *dict_schema = schema->dictionary; dict_schema != nullptr) {
                ArrowSchemaRelease(dict_schema);
                DKCHECK(ArrowSchemaIsReleased(dict_schema))
        << "Dictionary release callback should have marked it released";
            }
            DKCHECK_NE(schema->private_data, nullptr);
            delete static_cast<ExportedSchemaPrivateData *>(schema->private_data);

            ArrowSchemaMarkReleased(schema);
        }

        // Narrow a size value to int32_t, failing if it doesn't round-trip
        // (the C data interface encodes metadata counts/lengths as int32).
        template<typename SizeType>
        turbo::Result<int32_t> DowncastMetadataSize(SizeType size) {
            const auto narrowed = static_cast<int32_t>(size);
            if (narrowed < 0 || static_cast<SizeType>(narrowed) != size) {
                return turbo::invalid_argument_error("Metadata too large (more than 2**31 items or bytes)");
            }
            return narrowed;
        }

        // Serialize metadata into the C data interface binary format:
        //   int32 n_pairs, then per pair: int32 key_len, key bytes,
        //   int32 value_len, value bytes (native-endian int32, no padding).
        // Fails if the pair count or any string length exceeds 2**31-1.
        turbo::Result<std::string> EncodeMetadata(const KeyValueMetadata &metadata) {
            TURBO_MOVE_OR_RAISE(auto npairs, DowncastMetadataSize(metadata.size()));
            std::string exported;

            // Pre-compute total string size so serialization needs one allocation.
            size_t total_size = 4;
            for (int32_t i = 0; i < npairs; ++i) {
                total_size += 8 + metadata.key(i).length() + metadata.value(i).length();
            }
            exported.resize(total_size);

            // C++17: non-const std::string::data() gives writable storage.
            char *data_start = exported.data();
            char *data = data_start;
            auto write_int32 = [&](int32_t v) -> void {
                memcpy(data, &v, 4);
                data += 4;
            };
            auto write_string = [&](const std::string &s) -> turbo::Status {
                TURBO_MOVE_OR_RAISE(auto len, DowncastMetadataSize(s.length()));
                write_int32(len);
                if (len > 0) {
                    memcpy(data, s.data(), len);
                    data += len;
                }
                return turbo::OkStatus();
            };

            write_int32(npairs);
            for (int32_t i = 0; i < npairs; ++i) {
                TURBO_RETURN_NOT_OK(write_string(metadata.key(i)));
                TURBO_RETURN_NOT_OK(write_string(metadata.value(i)));
            }
            // The pre-computed size must match exactly what was written.
            DKCHECK_EQ(static_cast<size_t>(data - data_start), total_size);
            return exported;
        }

        // Builds the C data interface representation (format string, flags,
        // children, optional dictionary) of a type, field or schema.  Call one of
        // the Export* entry points (which may fail), then Finish() (which cannot)
        // to fill the caller's ArrowSchema and hand ownership of the backing
        // strings/structs to its private_data.
        struct SchemaExporter {
            // Export a field: name, nullability flag, type, children, metadata.
            turbo::Status ExportField(const Field &field) {
                export_.name_ = field.name();
                flags_ = field.nullable() ? NEBULA_FLAG_NULLABLE : 0;

                const DataType *type = UnwrapExtension(field.type().get());
                TURBO_RETURN_NOT_OK(ExportFormat(*type));
                TURBO_RETURN_NOT_OK(ExportChildren(type->fields()));
                TURBO_RETURN_NOT_OK(ExportMetadata(field.metadata().get()));
                return turbo::OkStatus();
            }

            // Export a bare type (no field name); marked nullable by convention.
            turbo::Status ExportType(const DataType &orig_type) {
                flags_ = NEBULA_FLAG_NULLABLE;

                const DataType *type = UnwrapExtension(&orig_type);
                TURBO_RETURN_NOT_OK(ExportFormat(*type));
                TURBO_RETURN_NOT_OK(ExportChildren(type->fields()));
                // There may be additional metadata to export
                TURBO_RETURN_NOT_OK(ExportMetadata(nullptr));
                return turbo::OkStatus();
            }

            // Export a whole schema as a struct-formatted node whose children are
            // the schema's fields, carrying the schema-level metadata.
            turbo::Status ExportSchema(const Schema &schema) {
                static const StructType dummy_struct_type({});
                flags_ = 0;

                TURBO_RETURN_NOT_OK(ExportFormat(dummy_struct_type));
                TURBO_RETURN_NOT_OK(ExportChildren(schema.fields()));
                TURBO_RETURN_NOT_OK(ExportMetadata(schema.metadata().get()));
                return turbo::OkStatus();
            }

            // Finalize exporting by setting C struct fields and allocating
            // autonomous private data for each schema node.
            //
            // This function can't fail, as properly reclaiming memory in case of error
            // would be too fragile.  After this function returns, memory is reclaimed
            // by calling the release() pointer in the top level ArrowSchema struct.
            void Finish(struct ArrowSchema *c_struct) {
                // First, create permanent ExportedSchemaPrivateData
                auto pdata = new ExportedSchemaPrivateData(std::move(export_));

                // Second, finish dictionary and children.
                if (dict_exporter_) {
                    dict_exporter_->Finish(&pdata->dictionary_);
                }
                pdata->child_pointers_.resize(child_exporters_.size(), nullptr);
                for (size_t i = 0; i < child_exporters_.size(); ++i) {
                    // Pointers into pdata->children_ are taken only after export_
                    // has been moved into the heap-allocated pdata, so they stay valid.
                    auto ptr = pdata->child_pointers_[i] = &pdata->children_[i];
                    child_exporters_[i].Finish(ptr);
                }

                // Third, fill C struct.
                DKCHECK_NE(c_struct, nullptr);
                memset(c_struct, 0, sizeof(*c_struct));

                c_struct->format = pdata->format_.c_str();
                c_struct->name = pdata->name_.c_str();
                c_struct->metadata = pdata->metadata_.empty() ? nullptr : pdata->metadata_.c_str();
                c_struct->flags = flags_;

                c_struct->n_children = static_cast<int64_t>(child_exporters_.size());
                c_struct->children = c_struct->n_children ? pdata->child_pointers_.data() : nullptr;
                c_struct->dictionary = dict_exporter_ ? &pdata->dictionary_ : nullptr;
                c_struct->private_data = pdata;
                c_struct->release = ReleaseExportedSchema;
            }

            // For extension types: remember the extension name/serialization as
            // metadata pairs (exported later by ExportMetadata) and return the
            // storage type.  Other types are returned unchanged.
            const DataType *UnwrapExtension(const DataType *type) {
                if (type->id() == Type::EXTENSION) {
                    const auto &ext_type = checked_cast<const ExtensionType &>(*type);
                    additional_metadata_.reserve(2);
                    additional_metadata_.emplace_back(kExtensionTypeKeyName, ext_type.extension_name());
                    additional_metadata_.emplace_back(kExtensionMetadataKeyName, ext_type.serialize());
                    return ext_type.storage_type().get();
                }
                return type;
            }

            // Compute the format string for `type` via the Visit overloads below.
            turbo::Status ExportFormat(const DataType &type) {
                if (type.id() == Type::DICTIONARY) {
                    const auto &dict_type = checked_cast<const DictionaryType &>(type);
                    if (dict_type.ordered()) {
                        flags_ |= NEBULA_FLAG_DICTIONARY_ORDERED;
                    }
                    // Dictionary type: parent struct describes index type,
                    // child dictionary struct describes value type.
                    TURBO_RETURN_NOT_OK(visit_type_inline(*dict_type.index_type(), this));
                    dict_exporter_ = std::make_unique<SchemaExporter>();
                    TURBO_RETURN_NOT_OK(dict_exporter_->ExportType(*dict_type.get_value_type()));
                } else {
                    TURBO_RETURN_NOT_OK(visit_type_inline(type, this));
                }
                DKCHECK(!export_.format_.empty());
                return turbo::OkStatus();
            }

            // Recursively export each child field into its own SchemaExporter.
            turbo::Status ExportChildren(const std::vector<std::shared_ptr<Field> > &fields) {
                export_.children_.resize(fields.size());
                child_exporters_.resize(fields.size());
                for (size_t i = 0; i < fields.size(); ++i) {
                    TURBO_RETURN_NOT_OK(child_exporters_[i].ExportField(*fields[i]));
                }
                return turbo::OkStatus();
            }

            // Encode `orig_metadata` (may be null) plus any extension-type pairs
            // collected by UnwrapExtension() into export_.metadata_.
            turbo::Status ExportMetadata(const KeyValueMetadata *orig_metadata) {
                static const KeyValueMetadata empty_metadata;

                if (orig_metadata == nullptr) {
                    orig_metadata = &empty_metadata;
                }
                if (additional_metadata_.empty()) {
                    // Fast path: nothing to merge, encode only if non-empty.
                    if (orig_metadata->size() > 0) {
                        TURBO_MOVE_OR_RAISE(export_.metadata_, EncodeMetadata(*orig_metadata));
                    }
                    return turbo::OkStatus();
                }
                // Additional metadata needs to be appended to the existing
                // (for extension types)
                KeyValueMetadata metadata(orig_metadata->keys(), orig_metadata->values());
                for (const auto &kv: additional_metadata_) {
                    // The metadata may already be there => ignore
                    if (metadata.Contains(kv.first)) {
                        continue;
                    }
                    metadata.append(kv.first, kv.second);
                }
                TURBO_MOVE_OR_RAISE(export_.metadata_, EncodeMetadata(metadata));
                return turbo::OkStatus();
            }

            // Helper used by the visitors below to record the format string.
            turbo::Status SetFormat(std::string s) {
                export_.format_ = std::move(s);
                return turbo::OkStatus();
            }

            // Type-specific visitors

            // Fallback for types with no C data interface equivalent.
            turbo::Status Visit(const DataType &type) { return ExportingNotImplemented(type); }

            turbo::Status Visit(const NullType &type) { return SetFormat("n"); }

            turbo::Status Visit(const BooleanType &type) { return SetFormat("b"); }

            turbo::Status Visit(const Int8Type &type) { return SetFormat("c"); }

            turbo::Status Visit(const UInt8Type &type) { return SetFormat("C"); }

            turbo::Status Visit(const Int16Type &type) { return SetFormat("s"); }

            turbo::Status Visit(const UInt16Type &type) { return SetFormat("S"); }

            turbo::Status Visit(const Int32Type &type) { return SetFormat("i"); }

            turbo::Status Visit(const UInt32Type &type) { return SetFormat("I"); }

            turbo::Status Visit(const Int64Type &type) { return SetFormat("l"); }

            turbo::Status Visit(const UInt64Type &type) { return SetFormat("L"); }

            turbo::Status Visit(const Fp16Type &type) { return SetFormat("e"); }

            turbo::Status Visit(const Fp32Type &type) { return SetFormat("f"); }

            turbo::Status Visit(const Fp64Type &type) { return SetFormat("g"); }

            turbo::Status Visit(const FixedSizeBinaryType &type) {
                return SetFormat("w:" + ToChars(type.byte_width()));
            }

            turbo::Status Visit(const DecimalType &type) {
                if (type.bit_width() == 128) {
                    // 128 is the default bit-width
                    return SetFormat("d:" + ToChars(type.precision()) + "," + ToChars(type.scale()));
                } else {
                    // Non-default widths carry an explicit third component.
                    return SetFormat("d:" + ToChars(type.precision()) + "," + ToChars(type.scale()) +
                                     "," + ToChars(type.bit_width()));
                }
            }

            turbo::Status Visit(const BinaryType &type) { return SetFormat("z"); }

            turbo::Status Visit(const LargeBinaryType &type) { return SetFormat("Z"); }

            turbo::Status Visit(const BinaryViewType &type) { return SetFormat("vz"); }

            turbo::Status Visit(const StringType &type) { return SetFormat("u"); }

            turbo::Status Visit(const LargeStringType &type) { return SetFormat("U"); }

            turbo::Status Visit(const StringViewType &type) { return SetFormat("vu"); }

            turbo::Status Visit(const Date32Type &type) { return SetFormat("tdD"); }

            turbo::Status Visit(const Date64Type &type) { return SetFormat("tdm"); }

            // Temporal types: format depends on the time unit.
            turbo::Status Visit(const Time32Type &type) {
                switch (type.unit()) {
                    case TimeUnit::SECOND:
                        export_.format_ = "tts";
                        break;
                    case TimeUnit::MILLI:
                        export_.format_ = "ttm";
                        break;
                    default:
                        return turbo::invalid_argument_error("Invalid time unit for Time32: ", type.unit());
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const Time64Type &type) {
                switch (type.unit()) {
                    case TimeUnit::MICRO:
                        export_.format_ = "ttu";
                        break;
                    case TimeUnit::NANO:
                        export_.format_ = "ttn";
                        break;
                    default:
                        return turbo::invalid_argument_error("Invalid time unit for Time64: ", type.unit());
                }
                return turbo::OkStatus();
            }

            // Timestamp format appends the timezone after the colon.
            turbo::Status Visit(const TimestampType &type) {
                switch (type.unit()) {
                    case TimeUnit::SECOND:
                        export_.format_ = "tss:";
                        break;
                    case TimeUnit::MILLI:
                        export_.format_ = "tsm:";
                        break;
                    case TimeUnit::MICRO:
                        export_.format_ = "tsu:";
                        break;
                    case TimeUnit::NANO:
                        export_.format_ = "tsn:";
                        break;
                    default:
                        return turbo::invalid_argument_error("Invalid time unit for Timestamp: ", type.unit());
                }
                export_.format_ += type.timezone();
                return turbo::OkStatus();
            }

            turbo::Status Visit(const DurationType &type) {
                switch (type.unit()) {
                    case TimeUnit::SECOND:
                        export_.format_ = "tDs";
                        break;
                    case TimeUnit::MILLI:
                        export_.format_ = "tDm";
                        break;
                    case TimeUnit::MICRO:
                        export_.format_ = "tDu";
                        break;
                    case TimeUnit::NANO:
                        export_.format_ = "tDn";
                        break;
                    default:
                        return turbo::invalid_argument_error("Invalid time unit for Duration: ", type.unit());
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const MonthIntervalType &type) { return SetFormat("tiM"); }

            turbo::Status Visit(const DayTimeIntervalType &type) { return SetFormat("tiD"); }

            turbo::Status Visit(const MonthDayNanoIntervalType &type) { return SetFormat("tin"); }

            turbo::Status Visit(const ListType &type) { return SetFormat("+l"); }

            turbo::Status Visit(const LargeListType &type) { return SetFormat("+L"); }

            turbo::Status Visit(const ListViewType &type) { return SetFormat("+vl"); }

            turbo::Status Visit(const LargeListViewType &type) { return SetFormat("+vL"); }

            turbo::Status Visit(const FixedSizeListType &type) {
                return SetFormat("+w:" + ToChars(type.list_size()));
            }

            turbo::Status Visit(const StructType &type) { return SetFormat("+s"); }

            turbo::Status Visit(const MapType &type) {
                export_.format_ = "+m";
                if (type.keys_sorted()) {
                    flags_ |= NEBULA_FLAG_MAP_KEYS_SORTED;
                }
                return turbo::OkStatus();
            }

            // Union format: "+ud:" or "+us:" followed by comma-separated type codes.
            turbo::Status Visit(const UnionType &type) {
                std::string &s = export_.format_;
                s = "+u";
                if (type.mode() == UnionMode::DENSE) {
                    s += "d:";
                } else {
                    DKCHECK_EQ(type.mode(), UnionMode::SPARSE);
                    s += "s:";
                }
                bool first = true;
                for (const auto code: type.type_codes()) {
                    if (!first) {
                        s += ",";
                    }
                    s += ToChars(code);
                    first = false;
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const RunEndEncodedType &type) { return SetFormat("+r"); }

            // Accumulated export state; moved into heap private data by Finish().
            ExportedSchemaPrivateData export_;
            // ArrowSchema::flags value being accumulated by the visitors.
            int64_t flags_ = 0;
            // Extension-type key/value pairs collected by UnwrapExtension().
            std::vector<std::pair<std::string, std::string> > additional_metadata_;
            // Set only for dictionary types (exports the value type).
            std::unique_ptr<SchemaExporter> dict_exporter_;
            // One exporter per child field.
            std::vector<SchemaExporter> child_exporters_;
        };
    } // namespace

    // Export a bare type into *out; on success the consumer owns *out.
    turbo::Status ExportType(const DataType &type, struct ArrowSchema *out) {
        SchemaExporter type_exporter;
        TURBO_RETURN_NOT_OK(type_exporter.ExportType(type));
        // Finish() cannot fail; it transfers ownership into *out.
        type_exporter.Finish(out);
        return turbo::OkStatus();
    }

    // Export a field (name, nullability, type, metadata) into *out.
    turbo::Status ExportField(const Field &field, struct ArrowSchema *out) {
        SchemaExporter field_exporter;
        TURBO_RETURN_NOT_OK(field_exporter.ExportField(field));
        // Finish() cannot fail; it transfers ownership into *out.
        field_exporter.Finish(out);
        return turbo::OkStatus();
    }

    // Export a full schema (struct-formatted node with one child per field).
    turbo::Status ExportSchema(const Schema &schema, struct ArrowSchema *out) {
        SchemaExporter schema_exporter;
        TURBO_RETURN_NOT_OK(schema_exporter.ExportSchema(schema));
        // Finish() cannot fail; it transfers ownership into *out.
        schema_exporter.Finish(out);
        return turbo::OkStatus();
    }

    //////////////////////////////////////////////////////////////////////////
    // C data export

    namespace {
        // Owner of everything an exported ArrowArray points at: the buffer pointer
        // table, child/dictionary structs, and a strong reference to the ArrayData
        // that owns the buffers.  Freed by ReleaseExportedArray().
        struct ExportedArrayPrivateData : PoolAllocationMixin<ExportedArrayPrivateData> {
            // The buffers are owned by the ArrayData member
            SmallVector<const void *, 3> buffers_;
            struct ArrowArray dictionary_{};  // valid only when a dictionary was exported
            SmallVector<struct ArrowArray, 1> children_;
            // Pointer table handed out as ArrowArray::children; points into children_.
            SmallVector<struct ArrowArray *, 4> child_pointers_;

            // Strong reference keeping the exported data (and its buffers) alive.
            std::shared_ptr<ArrayData> data_;
            // Optional device synchronization event, set by the device export paths.
            std::shared_ptr<Device::SyncEvent> sync_;
            // Backing storage for the extra "variadic buffer sizes" buffer of
            // binary-view / string-view arrays.
            std::vector<int64_t> variadic_buffer_sizes_;

            ExportedArrayPrivateData() = default;

            TURBO_DEFAULT_MOVE_AND_ASSIGN(ExportedArrayPrivateData);

            TURBO_DISALLOW_COPY_AND_ASSIGN(ExportedArrayPrivateData);
        };

        // Release callback installed on every exported ArrowArray.  Recursively
        // releases children and dictionary, then frees the private data (which in
        // turn drops the ArrayData reference).
        void ReleaseExportedArray(struct ArrowArray *array) {
            if (ArrowArrayIsReleased(array)) {
                return;
            }
            for (int64_t child_idx = 0; child_idx < array->n_children; ++child_idx) {
                struct ArrowArray *child_array = array->children[child_idx];
                ArrowArrayRelease(child_array);
                DKCHECK(ArrowArrayIsReleased(child_array))
        << "Child release callback should have marked it released";
            }
            if (struct ArrowArray *dict_array = array->dictionary; dict_array != nullptr) {
                ArrowArrayRelease(dict_array);
                DKCHECK(ArrowArrayIsReleased(dict_array))
        << "Dictionary release callback should have marked it released";
            }
            DKCHECK_NE(array->private_data, nullptr);
            delete static_cast<ExportedArrayPrivateData *>(array->private_data);

            ArrowArrayMarkReleased(array);
        }

        // Converts an ArrayData tree into the C data interface ArrowArray form.
        // Export() (fallible) gathers buffer pointers, dictionary and children;
        // Finish() (infallible) fills the caller's struct and hands ownership to
        // an ExportedArrayPrivateData.
        struct ArrayExporter {
            // device_interface: export raw device addresses (Buffer::address())
            // instead of CPU data pointers.
            explicit ArrayExporter(bool device_interface = false)
                : device_interface_(device_interface) {
            }

            turbo::Status Export(const std::shared_ptr<ArrayData> &data) {
                // Force computing null count.
                // This is because ARROW-9037 is in version 0.17 and 0.17.1, and they are
                // not able to import arrays without a null bitmap and null_count == -1.
                data->get_null_count();
                // Store buffer pointers
                size_t n_buffers = data->buffers.size();
                auto buffers_begin = data->buffers.begin();
                // Types that never carry a validity bitmap do not export the
                // leading validity-buffer slot.
                if (n_buffers > 0 && !internal::may_have_validity_bitmap(data->type->id())) {
                    --n_buffers;
                    ++buffers_begin;
                }

                // View types export one extra trailing buffer holding the sizes
                // of the variadic data buffers.
                bool need_variadic_buffer_sizes =
                        data->type->id() == Type::BINARY_VIEW || data->type->id() == Type::STRING_VIEW;
                if (need_variadic_buffer_sizes) {
                    ++n_buffers;
                }

                export_.buffers_.resize(n_buffers);
                std::transform(buffers_begin, data->buffers.end(), export_.buffers_.begin(),
                               [this](const std::shared_ptr<Buffer> &buffer) -> const void * {
                                   return buffer
                                              ? (device_interface_
                                                     ? reinterpret_cast<const void *>(buffer->address())
                                                     : buffer->data())
                                              : nullptr;
                               });

                if (need_variadic_buffer_sizes) {
                    // Buffers 0 and 1 are the validity and views buffers; the
                    // remainder are the variadic data buffers.
                    auto variadic_buffers = turbo::span(data->buffers.data(), data->buffers.size()).subspan(2);
                    export_.variadic_buffer_sizes_.resize(variadic_buffers.size());
                    size_t i = 0;
                    for (const auto &buf: variadic_buffers) {
                        export_.variadic_buffer_sizes_[i++] = buf->size();
                    }
                    // The sizes buffer occupies the extra slot reserved above.
                    export_.buffers_.back() = export_.variadic_buffer_sizes_.data();
                }

                // Export dictionary
                if (data->dictionary != nullptr) {
                    dict_exporter_ = std::make_unique<ArrayExporter>(device_interface_);
                    TURBO_RETURN_NOT_OK(dict_exporter_->Export(data->dictionary));
                }

                // Export children
                export_.children_.resize(data->child_data.size());
                child_exporters_.reserve(data->child_data.size());
                for (const auto &child: data->child_data) {
                    child_exporters_.emplace_back(ArrayExporter{device_interface_});
                    TURBO_RETURN_NOT_OK(child_exporters_.back().Export(child));
                }

                // Store owning pointer to ArrayData
                export_.data_ = data;

                export_.sync_ = nullptr;
                return turbo::OkStatus();
            }

            // Finalize exporting by setting C struct fields and allocating
            // autonomous private data for each array node.
            //
            // This function can't fail, as properly reclaiming memory in case of error
            // would be too fragile.  After this function returns, memory is reclaimed
            // by calling the release() pointer in the top level ArrowArray struct.
            void Finish(struct ArrowArray *c_struct_) {
                // First, create permanent ExportedArrayPrivateData, to make sure that
                // child ArrayData pointers don't get invalidated.
                auto pdata = new ExportedArrayPrivateData(std::move(export_));
                const ArrayData &data = *pdata->data_;

                // Second, finish dictionary and children.
                if (dict_exporter_) {
                    dict_exporter_->Finish(&pdata->dictionary_);
                }
                pdata->child_pointers_.resize(data.child_data.size(), nullptr);
                for (size_t i = 0; i < data.child_data.size(); ++i) {
                    // Pointers into pdata->children_ are taken only after export_ was
                    // moved to the heap, so they remain valid for the struct's lifetime.
                    auto ptr = &pdata->children_[i];
                    pdata->child_pointers_[i] = ptr;
                    child_exporters_[i].Finish(ptr);
                }

                // Third, fill C struct.
                DKCHECK_NE(c_struct_, nullptr);
                memset(c_struct_, 0, sizeof(*c_struct_));

                c_struct_->length = data.length;
                c_struct_->null_count = data.null_count;
                c_struct_->offset = data.offset;
                c_struct_->n_buffers = static_cast<int64_t>(pdata->buffers_.size());
                c_struct_->n_children = static_cast<int64_t>(pdata->child_pointers_.size());
                c_struct_->buffers = pdata->buffers_.data();
                c_struct_->children = c_struct_->n_children ? pdata->child_pointers_.data() : nullptr;
                c_struct_->dictionary = dict_exporter_ ? &pdata->dictionary_ : nullptr;
                c_struct_->private_data = pdata;
                c_struct_->release = ReleaseExportedArray;
            }

            // Accumulated export state; moved into heap private data by Finish().
            ExportedArrayPrivateData export_;
            // Set only when the array has a dictionary.
            std::unique_ptr<ArrayExporter> dict_exporter_;
            // One exporter per child ArrayData.
            std::vector<ArrayExporter> child_exporters_;
            // Whether to export device addresses rather than CPU pointers.
            bool device_interface_ = false;
        };
    } // namespace

    // Export an array (and optionally its type) to the C data interface.
    turbo::Status ExportArray(const Array &array, struct ArrowArray *out,
                              struct ArrowSchema *out_schema) {
        // The guard reclaims out_schema if the array export below fails.
        SchemaExportGuard schema_guard(out_schema);
        if (out_schema != nullptr) {
            TURBO_RETURN_NOT_OK(ExportType(*array.type(), out_schema));
        }
        ArrayExporter array_exporter;
        TURBO_RETURN_NOT_OK(array_exporter.Export(array.data()));
        array_exporter.Finish(out);
        // Success: the consumer now owns both structs.
        schema_guard.Detach();
        return turbo::OkStatus();
    }

    // Export a record batch as a struct array (and optionally its schema).
    turbo::Status ExportRecordBatch(const RecordBatch &batch, struct ArrowArray *out,
                                    struct ArrowSchema *out_schema) {
        // XXX perhaps bypass ToStructArray() for speed?
        TURBO_MOVE_OR_RAISE(auto struct_array, batch.ToStructArray());

        // The guard reclaims out_schema if the array export below fails.
        SchemaExportGuard schema_guard(out_schema);
        if (out_schema != nullptr) {
            // Export the schema, not the struct type, so as not to lose top-level metadata
            TURBO_RETURN_NOT_OK(ExportSchema(*batch.schema(), out_schema));
        }
        ArrayExporter array_exporter;
        TURBO_RETURN_NOT_OK(array_exporter.Export(struct_array->data()));
        array_exporter.Finish(out);
        // Success: the consumer now owns both structs.
        schema_guard.Detach();
        return turbo::OkStatus();
    }

    //////////////////////////////////////////////////////////////////////////
    // C device arrays

    // Recursively check that every non-null buffer in `data` (including child
    // arrays) lives on a single device, recording it in the out-parameters.
    turbo::Status ValidateDeviceInfo(const ArrayData &data,
                                     std::optional<DeviceAllocationType> *device_type,
                                     int64_t *device_id) {
        for (const auto &buffer: data.buffers) {
            if (!buffer) {
                continue;
            }

            if (!device_type->has_value()) {
                // First non-null buffer determines the expected device.
                *device_type = buffer->device_type();
                *device_id = buffer->device()->device_id();
                continue;
            }

            if (buffer->device_type() != *device_type) {
                return turbo::invalid_argument_error(
                    "Exporting device array with buffers on more than one device.");
            }

            if (buffer->device()->device_id() != *device_id) {
                return turbo::invalid_argument_error(
                    "Exporting device array with buffers on multiple device ids.");
            }
        }

        for (const auto &child: data.child_data) {
            TURBO_RETURN_NOT_OK(ValidateDeviceInfo(*child, device_type, device_id));
        }

        return turbo::OkStatus();
    }

    // Convenience overload returning the detected (device_type, device_id) pair.
    // device_type stays nullopt when the array has no non-null buffers.
    turbo::Result<std::pair<std::optional<DeviceAllocationType>, int64_t> > ValidateDeviceInfo(
        const ArrayData &data) {
        std::optional<DeviceAllocationType> detected_type;
        int64_t detected_id = -1;
        TURBO_RETURN_NOT_OK(ValidateDeviceInfo(data, &detected_type, &detected_id));
        return std::make_pair(detected_type, detected_id);
    }

    // Export an array through the device interface: like ExportArray(), but also
    // records the device type/id and an optional synchronization event in *out,
    // and exports raw device addresses instead of CPU pointers.
    turbo::Status ExportDeviceArray(const Array &array, std::shared_ptr<Device::SyncEvent> sync,
                                    struct ArrowDeviceArray *out, struct ArrowSchema *out_schema) {
        // Capture the raw event handle before `sync` is moved into the private data.
        void *sync_event = sync ? sync->get_raw() : nullptr;

        // The guard reclaims out_schema if anything below fails.
        SchemaExportGuard guard(out_schema);
        if (out_schema != nullptr) {
            TURBO_RETURN_NOT_OK(ExportType(*array.type(), out_schema));
        }

        // All buffers must reside on one device; detect which.
        TURBO_MOVE_OR_RAISE(auto device_info, ValidateDeviceInfo(*array.data()));
        if (!device_info.first) {
            // No non-null buffers => report the CPU device.
            out->device_type = NEBULA_DEVICE_CPU;
        } else {
            out->device_type = static_cast<ArrowDeviceType>(*device_info.first);
        }
        out->device_id = device_info.second;

        ArrayExporter exporter(/*device_interface*/ true);
        TURBO_RETURN_NOT_OK(exporter.Export(array.data()));
        exporter.Finish(&out->array);

        // Stash the sync event so it lives as long as the exported array.
        auto *pdata = reinterpret_cast<ExportedArrayPrivateData *>(out->array.private_data);
        pdata->sync_ = std::move(sync);
        out->sync_event = sync_event;

        guard.Detach();
        return turbo::OkStatus();
    }

    // Exports `batch` through the Arrow C device data interface by first viewing
    // it as a struct array. Top-level schema metadata is preserved by exporting
    // the schema itself rather than the struct type.
    turbo::Status ExportDeviceRecordBatch(const RecordBatch &batch,
                                          std::shared_ptr<Device::SyncEvent> sync,
                                          struct ArrowDeviceArray *out,
                                          struct ArrowSchema *out_schema) {
        // Capture the raw handle first; ownership of `sync` is transferred into
        // the exported array's private data at the end.
        void *sync_event = sync ? sync->get_raw() : nullptr;

        // XXX perhaps bypass ToStructArray for speed?
        TURBO_MOVE_OR_RAISE(auto array, batch.ToStructArray());

        SchemaExportGuard guard(out_schema);
        if (out_schema != nullptr) {
            // Export the schema, not the struct type, so as not to lose top-level metadata
            TURBO_RETURN_NOT_OK(ExportSchema(*batch.schema(), out_schema));
        }

        // Every buffer must sit on one device; absence of buffers maps to CPU.
        TURBO_MOVE_OR_RAISE(auto device_info, ValidateDeviceInfo(*array->data()));
        out->device_type = device_info.first
                               ? static_cast<ArrowDeviceType>(*device_info.first)
                               : NEBULA_DEVICE_CPU;
        out->device_id = device_info.second;

        ArrayExporter exporter(/*device_interface*/ true);
        TURBO_RETURN_NOT_OK(exporter.Export(array->data()));
        exporter.Finish(&out->array);

        auto *pdata = reinterpret_cast<ExportedArrayPrivateData *>(out->array.private_data);
        pdata->sync_ = std::move(sync);
        out->sync_event = sync_event;

        guard.Detach();
        return turbo::OkStatus();
    }

    //////////////////////////////////////////////////////////////////////////
    // C schema import

    namespace {
        // Hard cap on ArrowSchema nesting depth during import, guarding against
        // corrupted or malicious schema trees causing unbounded recursion.
        static constexpr int64_t kMaxImportRecursionLevel = 64;

        // Canonical error for an unrecognized or malformed C-ABI format string.
        turbo::Status InvalidFormatString(std::string_view v) {
            return turbo::invalid_argument_error("Invalid or unsupported format string: '", v, "'");
        }

        // Incremental cursor over an Arrow C-ABI format string (e.g. "tsn:UTC",
        // "d:19,10", "+ud:0,1"). Offers single-character consumption plus small
        // integer / time-unit parsing helpers; every failure is reported through
        // Invalid(), which quotes the entire original format string.
        class FormatStringParser {
        public:
            FormatStringParser() = default;

            explicit FormatStringParser(std::string_view v) : view_(v), index_(0) {
            }

            // True once all characters have been consumed.
            bool AtEnd() const { return index_ >= view_.length(); }

            // Consumes and returns the next character. Callers must ensure
            // !AtEnd() (e.g. via CheckHasNext()) before calling.
            char Next() { return view_[index_++]; }

            // The unconsumed tail of the format string.
            std::string_view Rest() { return view_.substr(index_); }

            // Consumes one character and requires it to be exactly `c`.
            turbo::Status CheckNext(char c) {
                if (AtEnd() || Next() != c) {
                    return Invalid();
                }
                return turbo::OkStatus();
            }

            // Fails when there is no character left to consume.
            turbo::Status CheckHasNext() {
                if (AtEnd()) {
                    return Invalid();
                }
                return turbo::OkStatus();
            }

            // Fails unless the whole format string has been consumed.
            turbo::Status CheckAtEnd() {
                if (!AtEnd()) {
                    return Invalid();
                }
                return turbo::OkStatus();
            }

            // Parses `v` as a decimal integer of type IntType.
            template<typename IntType = int32_t>
            turbo::Result<IntType> ParseInt(std::string_view v) {
                using ArrowIntType = typename CTypeTraits<IntType>::ArrowType;
                IntType value;
                if (!internal::ParseValue<ArrowIntType>(v.data(), v.size(), &value)) {
                    return Invalid();
                }
                return value;
            }

            // Consumes one character naming a time unit: s/m/u/n.
            turbo::Result<TimeUnit::type> ParseTimeUnit() {
                TURBO_RETURN_NOT_OK(CheckHasNext());
                switch (Next()) {
                    case 's':
                        return TimeUnit::SECOND;
                    case 'm':
                        return TimeUnit::MILLI;
                    case 'u':
                        return TimeUnit::MICRO;
                    case 'n':
                        return TimeUnit::NANO;
                    default:
                        return Invalid();
                }
            }

            // Splits `v` on `delim`; an empty input yields a single empty part.
            SmallVector<std::string_view, 2> Split(std::string_view v, char delim = ',') {
                SmallVector<std::string_view, 2> parts;
                size_t start = 0, end;
                while (true) {
                    end = v.find_first_of(delim, start);
                    parts.push_back(v.substr(start, end - start));
                    if (end == std::string_view::npos) {
                        break;
                    }
                    start = end + 1;
                }
                return parts;
            }

            // Parses a comma-separated integer list; empty input -> empty vector.
            template<typename IntType = int32_t>
            turbo::Result<std::vector<IntType> > ParseInts(std::string_view v) {
                std::vector<IntType> result;
                if (v.empty()) return result;
                auto parts = Split(v);
                result.reserve(parts.size());
                for (const auto &p: parts) {
                    TURBO_MOVE_OR_RAISE(auto i, ParseInt<IntType>(p));
                    result.push_back(i);
                }
                return result;
            }

            turbo::Status Invalid() { return InvalidFormatString(view_); }

        protected:
            std::string_view view_;
            // Initialized in-class: a default-constructed parser (SchemaImporter
            // holds one by value before ProcessFormat() assigns it) previously
            // left index_ indeterminate, making AtEnd()/Next() on such an
            // instance read uninitialized memory.
            size_t index_ = 0;
        };

        // Result of decoding an ArrowSchema::metadata blob: the key/value
        // metadata plus any Arrow extension-type annotations found inside it.
        // The *_index fields let the importer strip the extension keys from
        // `metadata` after the extension type has been reconstructed.
        struct DecodedMetadata {
            std::shared_ptr<KeyValueMetadata> metadata;
            std::string extension_name;
            std::string extension_serialized;
            int extension_name_index = -1; // index of extension_name in metadata
            int extension_serialized_index = -1; // index of extension_serialized in metadata
        };

        // Decodes the C-ABI metadata encoding: an int32 pair count followed by,
        // for each pair, an int32-length-prefixed key then value. Extension-type
        // entries are additionally surfaced on the result. Negative lengths are
        // rejected; lengths are otherwise trusted, as required by the C ABI.
        turbo::Result<DecodedMetadata> DecodeMetadata(const char *metadata) {
            auto consume_int32 = [&](int32_t *out) -> turbo::Status {
                int32_t raw;
                memcpy(&raw, metadata, 4);
                metadata += 4;
                *out = raw;
                if (*out < 0) {
                    return turbo::invalid_argument_error("Invalid encoded metadata string");
                }
                return turbo::OkStatus();
            };

            auto consume_string = [&](std::string *out) -> turbo::Status {
                int32_t length;
                TURBO_RETURN_NOT_OK(consume_int32(&length));
                out->resize(length);
                if (length > 0) {
                    memcpy(&(*out)[0], metadata, length);
                    metadata += length;
                }
                return turbo::OkStatus();
            };

            DecodedMetadata result;

            // A null or empty metadata blob decodes to an empty result
            // (result.metadata stays null in both cases).
            if (metadata == nullptr) {
                return result;
            }
            int32_t num_pairs;
            TURBO_RETURN_NOT_OK(consume_int32(&num_pairs));
            if (num_pairs == 0) {
                return result;
            }
            std::vector<std::string> keys(num_pairs);
            std::vector<std::string> values(num_pairs);
            for (int32_t i = 0; i < num_pairs; ++i) {
                TURBO_RETURN_NOT_OK(consume_string(&keys[i]));
                TURBO_RETURN_NOT_OK(consume_string(&values[i]));
                if (keys[i] == kExtensionTypeKeyName) {
                    // Remember where the extension annotations live so the
                    // importer can strip them after deserializing the type.
                    result.extension_name = values[i];
                    result.extension_name_index = i;
                } else if (keys[i] == kExtensionMetadataKeyName) {
                    result.extension_serialized = values[i];
                    result.extension_serialized_index = i;
                }
            }
            result.metadata = key_value_metadata(std::move(keys), std::move(values));
            return result;
        }

        // Recursively reconstructs nebula types from a C-ABI ArrowSchema tree.
        // Order of work in DoImport(): children first, then the parent type from
        // its format string, then an optional dictionary layer, then metadata
        // and extension-type deserialization.
        struct SchemaImporter {
            // Imports a top-level ArrowSchema, taking ownership of it
            // (guard_ releases it when this importer is destroyed).
            turbo::Status Import(struct ArrowSchema *src) {
                if (ArrowSchemaIsReleased(src)) {
                    return turbo::invalid_argument_error("Cannot import released ArrowSchema");
                }
                guard_.Reset(src);
                recursion_level_ = 0;
                c_struct_ = src;
                return DoImport();
            }

            // Builds a Field from the imported name/type/nullability/metadata.
            // NOTE(review): metadata_.metadata is a member of a const object here,
            // so std::move degenerates to a shared_ptr copy — harmless, but the
            // move is cosmetic only.
            turbo::Result<std::shared_ptr<Field> > MakeField() const {
                const char *name = c_struct_->name ? c_struct_->name : "";
                bool nullable = (c_struct_->flags & NEBULA_FLAG_NULLABLE) != 0;
                return field(name, type_, nullable, std::move(metadata_.metadata));
            }

            // Builds a Schema; only valid when the imported type is a struct.
            turbo::Result<std::shared_ptr<Schema> > MakeSchema() const {
                if (type_->id() != Type::STRUCT) {
                    return turbo::invalid_argument_error(
                        "Cannot import schema: ArrowSchema describes non-struct type ",
                        type_->to_string());
                }
                return schema(type_->fields(), std::move(metadata_.metadata));
            }

            turbo::Result<std::shared_ptr<DataType> > MakeType() const { return type_; }

        protected:
            // Imports a child schema. Depth is tracked against
            // kMaxImportRecursionLevel to reject pathological nesting.
            turbo::Status ImportChild(const SchemaImporter *parent, struct ArrowSchema *src) {
                if (ArrowSchemaIsReleased(src)) {
                    return turbo::invalid_argument_error("Cannot import released ArrowSchema");
                }
                recursion_level_ = parent->recursion_level_ + 1;
                if (recursion_level_ >= kMaxImportRecursionLevel) {
                    return turbo::invalid_argument_error("Recursion level in ArrowSchema struct exceeded");
                }
                // The ArrowSchema is owned by its parent, so don't release it ourselves
                c_struct_ = src;
                return DoImport();
            }

            // Dictionary value schemas follow the same ownership rules as children.
            turbo::Status ImportDict(const SchemaImporter *parent, struct ArrowSchema *src) {
                return ImportChild(parent, src);
            }

            turbo::Status DoImport() {
                // First import children (required for reconstituting parent type)
                child_importers_.resize(c_struct_->n_children);
                for (int64_t i = 0; i < c_struct_->n_children; ++i) {
                    DKCHECK_NE(c_struct_->children[i], nullptr);
                    TURBO_RETURN_NOT_OK(child_importers_[i].ImportChild(this, c_struct_->children[i]));
                }

                // Import main type
                TURBO_RETURN_NOT_OK(ProcessFormat());
                DKCHECK_NE(type_, nullptr);

                // Import dictionary type
                if (c_struct_->dictionary != nullptr) {
                    // Check this index type
                    if (!is_integer(type_->id())) {
                        return turbo::invalid_argument_error(
                            "ArrowSchema struct has a dictionary but is not an integer type: ",
                            type_->to_string());
                    }
                    SchemaImporter dict_importer;
                    TURBO_RETURN_NOT_OK(dict_importer.ImportDict(this, c_struct_->dictionary));
                    bool ordered = (c_struct_->flags & NEBULA_FLAG_DICTIONARY_ORDERED) != 0;
                    // The type imported so far becomes the index type of the
                    // dictionary; the dictionary schema supplies the value type.
                    type_ = dictionary(type_, dict_importer.type_, ordered);
                }

                // Import metadata
                TURBO_MOVE_OR_RAISE(metadata_, DecodeMetadata(c_struct_->metadata));

                // Detect extension type
                if (!metadata_.extension_name.empty()) {
                    const auto registered_ext_type = GetExtensionType(metadata_.extension_name);
                    // Unregistered extension names are left as plain storage
                    // types with their metadata intact.
                    if (registered_ext_type) {
                        TURBO_MOVE_OR_RAISE(
                            type_, registered_ext_type->deserialize(std::move(type_),
                                metadata_.extension_serialized));
                        // If metadata is present, delete both metadata keys (otherwise, just remove
                        // the extension name key)
                        if (metadata_.extension_serialized_index >= 0) {
                            TURBO_RETURN_NOT_OK(metadata_.metadata->DeleteMany(
                                {metadata_.extension_name_index, metadata_.extension_serialized_index}));
                        } else {
                            TURBO_RETURN_NOT_OK(metadata_.metadata->Delete(metadata_.extension_name_index));
                        }
                    }
                }

                return turbo::OkStatus();
            }

            // Dispatches on the first character of the C-ABI format string.
            turbo::Status ProcessFormat() {
                f_parser_ = FormatStringParser(c_struct_->format);
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'n':
                        return ProcessPrimitive(null());
                    case 'b':
                        return ProcessPrimitive(boolean());
                    case 'c':
                        return ProcessPrimitive(int8());
                    case 'C':
                        return ProcessPrimitive(uint8());
                    case 's':
                        return ProcessPrimitive(int16());
                    case 'S':
                        return ProcessPrimitive(uint16());
                    case 'i':
                        return ProcessPrimitive(int32());
                    case 'I':
                        return ProcessPrimitive(uint32());
                    case 'l':
                        return ProcessPrimitive(int64());
                    case 'L':
                        return ProcessPrimitive(uint64());
                    case 'e':
                        return ProcessPrimitive(float16());
                    case 'f':
                        return ProcessPrimitive(float32());
                    case 'g':
                        return ProcessPrimitive(float64());
                    case 'u':
                        return ProcessPrimitive(utf8());
                    case 'U':
                        return ProcessPrimitive(large_utf8());
                    case 'z':
                        return ProcessPrimitive(binary());
                    case 'Z':
                        return ProcessPrimitive(large_binary());
                    case 'v':
                        return ProcessBinaryView();
                    case 'w':
                        return ProcessFixedSizeBinary();
                    case 'd':
                        return ProcessDecimal();
                    case 't':
                        return ProcessTemporal();
                    case '+':
                        return ProcessNested();
                }
                return f_parser_.Invalid();
            }

            // "vz" -> binary view, "vu" -> utf8 view.
            turbo::Status ProcessBinaryView() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'z':
                        return ProcessPrimitive(binary_view());
                    case 'u':
                        return ProcessPrimitive(utf8_view());
                }
                return f_parser_.Invalid();
            }

            // Second-level dispatch for "t…" (temporal) format strings.
            turbo::Status ProcessTemporal() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'd':
                        return ProcessDate();
                    case 't':
                        return ProcessTime();
                    case 'D':
                        return ProcessDuration();
                    case 'i':
                        return ProcessInterval();
                    case 's':
                        return ProcessTimestamp();
                }
                return f_parser_.Invalid();
            }

            // Second-level dispatch for "+…" (nested) format strings.
            turbo::Status ProcessNested() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'l':
                        return ProcessListLike<ListType>();
                    case 'L':
                        return ProcessListLike<LargeListType>();
                    case 'v': {
                        // "+vl" / "+vL": list-view variants.
                        TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                        switch (f_parser_.Next()) {
                            case 'l':
                                return ProcessListView<ListViewType>();
                            case 'L':
                                return ProcessListView<LargeListViewType>();
                        }
                        break;
                    }
                    case 'w':
                        return ProcessFixedSizeList();
                    case 's':
                        return ProcessStruct();
                    case 'm':
                        return ProcessMap();
                    case 'u':
                        return ProcessUnion();
                    case 'r':
                        return ProcessREE();
                }
                return f_parser_.Invalid();
            }

            // "tdD" -> 32-bit days, "tdm" -> 64-bit milliseconds.
            turbo::Status ProcessDate() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'D':
                        return ProcessPrimitive(date32());
                    case 'm':
                        return ProcessPrimitive(date64());
                }
                return f_parser_.Invalid();
            }

            turbo::Status ProcessInterval() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                switch (f_parser_.Next()) {
                    case 'D':
                        return ProcessPrimitive(day_time_interval());
                    case 'M':
                        return ProcessPrimitive(month_interval());
                    case 'n':
                        return ProcessPrimitive(month_day_nano_interval());
                }
                return f_parser_.Invalid();
            }

            // Seconds/milliseconds use 32-bit storage, micro/nano need 64 bits.
            turbo::Status ProcessTime() {
                TURBO_MOVE_OR_RAISE(auto unit, f_parser_.ParseTimeUnit());
                if (unit == TimeUnit::SECOND || unit == TimeUnit::MILLI) {
                    return ProcessPrimitive(time32(unit));
                } else {
                    return ProcessPrimitive(time64(unit));
                }
            }

            turbo::Status ProcessDuration() {
                TURBO_MOVE_OR_RAISE(auto unit, f_parser_.ParseTimeUnit());
                return ProcessPrimitive(duration(unit));
            }

            // "ts<unit>:<timezone>"; the timezone may be empty.
            turbo::Status ProcessTimestamp() {
                TURBO_MOVE_OR_RAISE(auto unit, f_parser_.ParseTimeUnit());
                TURBO_RETURN_NOT_OK(f_parser_.CheckNext(':'));
                type_ = timestamp(unit, std::string(f_parser_.Rest()));
                return turbo::OkStatus();
            }

            // "w:<byte_width>".
            turbo::Status ProcessFixedSizeBinary() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckNext(':'));
                TURBO_MOVE_OR_RAISE(auto byte_width, f_parser_.ParseInt(f_parser_.Rest()));
                if (byte_width < 0) {
                    return f_parser_.Invalid();
                }
                type_ = fixed_size_binary(byte_width);
                return turbo::OkStatus();
            }

            // "d:<precision>,<scale>[,<bit_width>]"; bit width defaults to 128.
            turbo::Status ProcessDecimal() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckNext(':'));
                TURBO_MOVE_OR_RAISE(auto prec_scale, f_parser_.ParseInts(f_parser_.Rest()));
                // 3 elements indicates bit width was communicated as well.
                if (prec_scale.size() != 2 && prec_scale.size() != 3) {
                    return f_parser_.Invalid();
                }
                if (prec_scale[0] <= 0) {
                    return f_parser_.Invalid();
                }
                if (prec_scale.size() == 2 || prec_scale[2] == 128) {
                    type_ = decimal128(prec_scale[0], prec_scale[1]);
                } else if (prec_scale[2] == 256) {
                    type_ = decimal256(prec_scale[0], prec_scale[1]);
                } else {
                    return f_parser_.Invalid();
                }
                return turbo::OkStatus();
            }

            // Terminal case: the format string must be exhausted and the
            // ArrowSchema must carry no children.
            turbo::Status ProcessPrimitive(const std::shared_ptr<DataType> &type) {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                type_ = type;
                return CheckNoChildren(type);
            }

            // "+l" / "+L": variable-size list with exactly one child field.
            template<typename ListType>
            turbo::Status ProcessListLike() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_MOVE_OR_RAISE(auto field, MakeChildField(0));
                type_ = std::make_shared<ListType>(field);
                return turbo::OkStatus();
            }

            // "+vl" / "+vL": list-view with exactly one child field.
            template<typename ListViewType>
            turbo::Status ProcessListView() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_MOVE_OR_RAISE(auto field, MakeChildField(0));
                type_ = std::make_shared<ListViewType>(std::move(field));
                return turbo::OkStatus();
            }

            // "+m": the single child must be a 2-field struct (key, value).
            turbo::Status ProcessMap() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_MOVE_OR_RAISE(auto field, MakeChildField(0));
                const auto &value_type = field->type();
                if (value_type->id() != Type::STRUCT) {
                    return turbo::invalid_argument_error("Imported map array has unexpected child field type: ",
                                                         field->to_string());
                }
                if (value_type->num_fields() != 2) {
                    return turbo::invalid_argument_error("Imported map array has unexpected child field type: ",
                                                         field->to_string());
                }

                bool keys_sorted = (c_struct_->flags & NEBULA_FLAG_MAP_KEYS_SORTED);
                bool values_nullable = value_type->field(1)->nullable();
                // Some implementations of Arrow (such as Rust) use a non-standard field name
                // for key ("keys") and value ("values") fields. For simplicity, we override
                // them on import.
                auto values_field =
                        ::nebula::field("value", value_type->field(1)->type(), values_nullable);
                type_ = map(value_type->field(0)->type(), values_field, keys_sorted);
                return turbo::OkStatus();
            }

            // "+w:<list_size>".
            turbo::Status ProcessFixedSizeList() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckNext(':'));
                TURBO_MOVE_OR_RAISE(auto list_size, f_parser_.ParseInt(f_parser_.Rest()));
                if (list_size < 0) {
                    return f_parser_.Invalid();
                }
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_MOVE_OR_RAISE(auto field, MakeChildField(0));
                type_ = fixed_size_list(field, list_size);
                return turbo::OkStatus();
            }

            // "+s": struct with any number of children.
            turbo::Status ProcessStruct() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                TURBO_MOVE_OR_RAISE(auto fields, MakeChildFields());
                type_ = STRUCT(std::move(fields));
                return turbo::OkStatus();
            }

            // "+ud:<codes>" / "+us:<codes>": dense/sparse union; the number of
            // type codes must match the number of children.
            turbo::Status ProcessUnion() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckHasNext());
                UnionMode::type mode;
                switch (f_parser_.Next()) {
                    case 'd':
                        mode = UnionMode::DENSE;
                        break;
                    case 's':
                        mode = UnionMode::SPARSE;
                        break;
                    default:
                        return f_parser_.Invalid();
                }
                TURBO_RETURN_NOT_OK(f_parser_.CheckNext(':'));
                TURBO_MOVE_OR_RAISE(auto type_codes, f_parser_.ParseInts<int8_t>(f_parser_.Rest()));
                TURBO_MOVE_OR_RAISE(auto fields, MakeChildFields());
                if (fields.size() != type_codes.size()) {
                    return turbo::invalid_argument_error(
                        "ArrowArray struct number of children incompatible with format string "
                        "(mismatching number of union type codes) ",
                        "'", c_struct_->format, "'");
                }
                for (const auto code: type_codes) {
                    if (code < 0) {
                        return turbo::invalid_argument_error("Negative type code in union: format string '",
                                                             c_struct_->format, "'");
                    }
                }
                if (mode == UnionMode::SPARSE) {
                    type_ = sparse_union(std::move(fields), std::move(type_codes));
                } else {
                    type_ = dense_union(std::move(fields), std::move(type_codes));
                }
                return turbo::OkStatus();
            }

            // "+r": run-end encoded; child 0 is run ends, child 1 is values.
            turbo::Status ProcessREE() {
                TURBO_RETURN_NOT_OK(f_parser_.CheckAtEnd());
                TURBO_RETURN_NOT_OK(CheckNumChildren(2));
                TURBO_MOVE_OR_RAISE(auto run_ends_field, MakeChildField(0));
                TURBO_MOVE_OR_RAISE(auto values_field, MakeChildField(1));
                if (!is_run_end_type(run_ends_field->type()->id())) {
                    return turbo::invalid_argument_error("Expected a valid run-end integer type, but struct has ",
                                                         run_ends_field->type()->to_string());
                }
                if (values_field->type()->id() == Type::RUN_END_ENCODED) {
                    return turbo::invalid_argument_error("ArrowArray struct contains a nested run-end encoded array");
                }
                type_ = run_end_encoded(run_ends_field->type(), values_field->type());
                return turbo::OkStatus();
            }

            // Converts a previously-imported child into a Field; child schemas
            // must carry a (possibly empty but) non-null name.
            turbo::Result<std::shared_ptr<Field> > MakeChildField(int64_t child_id) {
                const auto &child = child_importers_[child_id];
                if (child.c_struct_->name == nullptr) {
                    return turbo::invalid_argument_error("Expected non-null name in imported array child");
                }
                return child.MakeField();
            }

            turbo::Result<std::vector<std::shared_ptr<Field> > > MakeChildFields() {
                std::vector<std::shared_ptr<Field> > fields(child_importers_.size());
                for (int64_t i = 0; i < static_cast<int64_t>(child_importers_.size()); ++i) {
                    TURBO_MOVE_OR_RAISE(fields[i], MakeChildField(i));
                }
                return fields;
            }

            turbo::Status CheckNoChildren(const std::shared_ptr<DataType> &type) {
                return CheckNumChildren(type, 0);
            }

            // Error message names the imported type (primitive path).
            turbo::Status CheckNumChildren(const std::shared_ptr<DataType> &type, int64_t n_children) {
                if (c_struct_->n_children != n_children) {
                    return turbo::invalid_argument_error("Expected ", n_children, " children for imported type ",
                                                         *type, ", ArrowArray struct has ", c_struct_->n_children);
                }
                return turbo::OkStatus();
            }

            // Error message names the format string (nested path, where the
            // type is not built yet).
            turbo::Status CheckNumChildren(int64_t n_children) {
                if (c_struct_->n_children != n_children) {
                    return turbo::invalid_argument_error("Expected ", n_children, " children for imported format '",
                                                         c_struct_->format, "', ArrowArray struct has ",
                                                         c_struct_->n_children);
                }
                return turbo::OkStatus();
            }

            // The ArrowSchema currently being imported (owned only at top level).
            struct ArrowSchema *c_struct_{nullptr};
            // Releases the top-level schema; unused for children.
            SchemaExportGuard guard_{nullptr};
            FormatStringParser f_parser_;
            int64_t recursion_level_;
            std::vector<SchemaImporter> child_importers_;
            // The reconstructed type; set by DoImport().
            std::shared_ptr<DataType> type_;
            DecodedMetadata metadata_;
        };
    } // namespace

    // Imports a DataType from (and takes ownership of) the given ArrowSchema.
    turbo::Result<std::shared_ptr<DataType> > ImportType(struct ArrowSchema *schema) {
        SchemaImporter type_importer;
        TURBO_RETURN_NOT_OK(type_importer.Import(schema));
        return type_importer.MakeType();
    }

    // Imports a Field (name, type, nullability, metadata) from the given
    // ArrowSchema, taking ownership of it.
    turbo::Result<std::shared_ptr<Field> > ImportField(struct ArrowSchema *schema) {
        SchemaImporter field_importer;
        TURBO_RETURN_NOT_OK(field_importer.Import(schema));
        return field_importer.MakeField();
    }

    // Imports a Schema from the given ArrowSchema (which must describe a struct
    // type), taking ownership of it.
    turbo::Result<std::shared_ptr<Schema> > ImportSchema(struct ArrowSchema *schema) {
        SchemaImporter schema_importer;
        TURBO_RETURN_NOT_OK(schema_importer.Import(schema));
        return schema_importer.MakeSchema();
    }

    //////////////////////////////////////////////////////////////////////////
    // C data import

    namespace {
        // Owns an imported C ArrowArray and calls its release callback exactly
        // once, when the wrapper is destroyed (or Release() is called earlier).
        struct ImportedArrayData {
            struct ArrowArray array_;
            DeviceAllocationType device_type_;
            std::shared_ptr<Device::SyncEvent> device_sync_;

            ImportedArrayData() {
                // Start out in the released state so a partially-initialized
                // instance is always safe to destroy.
                ArrowArrayMarkReleased(&array_);
            }

            // Invokes the producer's release callback; idempotent.
            void Release() {
                if (ArrowArrayIsReleased(&array_)) {
                    return;
                }
                ArrowArrayRelease(&array_);
                DKCHECK(ArrowArrayIsReleased(&array_));
            }

            ~ImportedArrayData() { Release(); }

        private:
            TURBO_DISALLOW_COPY_AND_ASSIGN(ImportedArrayData);
        };

        // A buffer wrapping an imported piece of data. Holds a strong reference
        // to the ImportedArrayData so the producer's release callback only runs
        // after every buffer view into the import is gone.
        class ImportedBuffer : public Buffer {
        public:
            // CPU import: plain data/size view over the imported memory.
            ImportedBuffer(const uint8_t *data, int64_t size,
                           std::shared_ptr<ImportedArrayData> import)
                : Buffer(data, size), import_(std::move(import)) {
            }

            // Device import: additionally records the memory manager and
            // device allocation type of the imported memory.
            ImportedBuffer(const uint8_t *data, int64_t size, std::shared_ptr<MemoryManager> mm,
                           DeviceAllocationType device_type,
                           std::shared_ptr<ImportedArrayData> import)
                : Buffer(data, size, mm, nullptr, device_type), import_(std::move(import)) {
            }

            ~ImportedBuffer() override = default;

            // Surfaces the producer's synchronization event for device buffers.
            std::shared_ptr<Device::SyncEvent> device_sync_event() const override {
                return import_->device_sync_;
            }

        protected:
            std::shared_ptr<ImportedArrayData> import_;
        };

        struct ArrayImporter {
            // `type` drives layout expectations during import. zero_size_buffer_
            // wraps kZeroSizeArea (a valid non-null address) — presumably
            // substituted for zero-length imported buffers; confirm against the
            // buffer-import code further down.
            explicit ArrayImporter(const std::shared_ptr<DataType> &type)
                : type_(type), zero_size_buffer_(std::make_shared<Buffer>(kZeroSizeArea, 0)) {
            }

            // Imports a device-aware array: maps (device_type, device_id) to a
            // MemoryManager via `mapper`, imports the contained ArrowArray, and
            // wraps the producer's sync event (if any) for consumers.
            turbo::Status Import(struct ArrowDeviceArray *src, const DeviceMemoryMapper &mapper) {
                TURBO_MOVE_OR_RAISE(memory_mgr_, mapper(src->device_type, src->device_id));
                device_type_ = static_cast<DeviceAllocationType>(src->device_type);
                TURBO_RETURN_NOT_OK(Import(&src->array));
                if (src->sync_event != nullptr) {
                    // No-op destructor: the raw event handle apparently remains
                    // owned by the producer — NOTE(review) confirm intended lifetime.
                    TURBO_MOVE_OR_RAISE(import_->device_sync_, memory_mgr_->WrapDeviceSyncEvent(
                                            src->sync_event, [](void*) {}));
                }
                // reset internal state before next import
                memory_mgr_.reset();
                device_type_ = DeviceAllocationType::kCPU;
                return turbo::OkStatus();
            }

            turbo::Status Import(struct ArrowArray *src) {
                if (ArrowArrayIsReleased(src)) {
                    return turbo::invalid_argument_error("Cannot import released ArrowArray");
                }
                recursion_level_ = 0;
                import_ = std::make_shared<ImportedArrayData>();
                c_struct_ = &import_->array_;
                import_->device_type_ = device_type_;
                ArrowArrayMove(src, c_struct_);
                return DoImport();
            }

            turbo::Result<std::shared_ptr<Array> > MakeArray() {
                DKCHECK_NE(data_, nullptr);
                return ::nebula::make_array(data_);
            }

            std::shared_ptr<ArrayData> GetArrayData() {
                DKCHECK_NE(data_, nullptr);
                return data_;
            }

            turbo::Result<std::shared_ptr<RecordBatch> > MakeRecordBatch(std::shared_ptr<Schema> schema) {
                DKCHECK_NE(data_, nullptr);
                if (data_->get_null_count() != 0) {
                    return turbo::invalid_argument_error(
                        "ArrowArray struct has non-zero null count, "
                        "cannot be imported as RecordBatch");
                }
                if (data_->offset != 0) {
                    return turbo::invalid_argument_error(
                        "ArrowArray struct has non-zero offset, "
                        "cannot be imported as RecordBatch");
                }
                return RecordBatch::create(std::move(schema), data_->length,
                                         std::move(data_->child_data), import_->device_type_,
                                         import_->device_sync_);
            }

            turbo::Status ImportChild(const ArrayImporter *parent, struct ArrowArray *src) {
                if (ArrowArrayIsReleased(src)) {
                    return turbo::invalid_argument_error("Cannot import released ArrowArray");
                }
                recursion_level_ = parent->recursion_level_ + 1;
                if (recursion_level_ >= kMaxImportRecursionLevel) {
                    return turbo::invalid_argument_error("Recursion level in ArrowArray struct exceeded");
                }
                device_type_ = parent->device_type_;
                memory_mgr_ = parent->memory_mgr_;
                // Child buffers will keep the entire parent import alive.
                // Perhaps we can move the child structs to an owned area
                // when the parent ImportedArrayData::Release() gets called,
                // but that is another level of complication.
                import_ = parent->import_;
                // The ArrowArray shouldn't be moved, it's owned by its parent
                c_struct_ = src;
                return DoImport();
            }

            turbo::Status ImportDict(const ArrayImporter *parent, struct ArrowArray *src) {
                return ImportChild(parent, src);
            }

            turbo::Status DoImport() {
                // Unwrap extension type
                const DataType *storage_type = type_.get();
                if (storage_type->id() == Type::EXTENSION) {
                    storage_type =
                            checked_cast<const ExtensionType &>(*storage_type).storage_type().get();
                }

                // First import children (required for reconstituting parent array data)
                const auto &fields = storage_type->fields();
                if (c_struct_->n_children != static_cast<int64_t>(fields.size())) {
                    return turbo::invalid_argument_error("ArrowArray struct has ", c_struct_->n_children,
                                                         " children, expected ", fields.size(), " for type ",
                                                         type_->to_string());
                }
                child_importers_.reserve(fields.size());
                for (int64_t i = 0; i < c_struct_->n_children; ++i) {
                    DKCHECK_NE(c_struct_->children[i], nullptr);
                    child_importers_.emplace_back(fields[i]->type());
                    TURBO_RETURN_NOT_OK(child_importers_.back().ImportChild(this, c_struct_->children[i]));
                }

                // Import main data
                TURBO_RETURN_NOT_OK(visit_type_inline(*storage_type, this));

                bool is_dict_type = (storage_type->id() == Type::DICTIONARY);
                if (c_struct_->dictionary != nullptr) {
                    if (!is_dict_type) {
                        return turbo::invalid_argument_error("Import type is ", type_->to_string(),
                                                             " but dictionary field in ArrowArray struct is not null");
                    }
                    const auto &dict_type = checked_cast<const DictionaryType &>(*storage_type);
                    // Import dictionary values
                    ArrayImporter dict_importer(dict_type.get_value_type());
                    TURBO_RETURN_NOT_OK(dict_importer.ImportDict(this, c_struct_->dictionary));
                    data_->dictionary = dict_importer.GetArrayData();
                } else {
                    if (is_dict_type) {
                        return turbo::invalid_argument_error("Import type is ", type_->to_string(),
                                                             " but dictionary field in ArrowArray struct is null");
                    }
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const DataType &type) {
                return turbo::unimplemented_error("Cannot import array of type ", type_->to_string());
            }

            turbo::Status Visit(const FixedWidthType &type) { return ImportFixedSizePrimitive(type); }

            turbo::Status Visit(const NullType &type) {
                TURBO_RETURN_NOT_OK(CheckNoChildren());
                if (c_struct_->n_buffers == 1) {
                    // Legacy format exported by older Arrow C++ versions
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                } else {
                    TURBO_RETURN_NOT_OK(CheckNumBuffers(0));
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                    data_->buffers.insert(data_->buffers.begin(), nullptr);
                }
                data_->null_count = data_->length;
                return turbo::OkStatus();
            }

            turbo::Status Visit(const StringType &type) { return ImportStringLike(type); }

            turbo::Status Visit(const BinaryType &type) { return ImportStringLike(type); }

            turbo::Status Visit(const LargeStringType &type) { return ImportStringLike(type); }

            turbo::Status Visit(const LargeBinaryType &type) { return ImportStringLike(type); }

            turbo::Status Visit(const StringViewType &type) { return ImportBinaryView(type); }

            turbo::Status Visit(const BinaryViewType &type) { return ImportBinaryView(type); }

            turbo::Status Visit(const ListType &type) { return ImportListLike(type); }

            turbo::Status Visit(const LargeListType &type) { return ImportListLike(type); }

            turbo::Status Visit(const ListViewType &type) { return ImportListView(type); }

            turbo::Status Visit(const LargeListViewType &type) { return ImportListView(type); }

            turbo::Status Visit(const FixedSizeListType &type) {
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_RETURN_NOT_OK(CheckNumBuffers(1));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                return turbo::OkStatus();
            }

            turbo::Status Visit(const StructType &type) {
                TURBO_RETURN_NOT_OK(CheckNumBuffers(1));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                return turbo::OkStatus();
            }

            turbo::Status Visit(const SparseUnionType &type) {
                TURBO_RETURN_NOT_OK(CheckNoNulls());
                if (c_struct_->n_buffers == 2) {
                    // ARROW-14179: legacy format exported by older Arrow C++ versions
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(1, sizeof(int8_t)));
                } else {
                    TURBO_RETURN_NOT_OK(CheckNumBuffers(1));
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(0, sizeof(int8_t)));
                    // Prepend a null bitmap buffer, as expected by SparseUnionArray
                    data_->buffers.insert(data_->buffers.begin(), nullptr);
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const DenseUnionType &type) {
                TURBO_RETURN_NOT_OK(CheckNoNulls());
                if (c_struct_->n_buffers == 3) {
                    // ARROW-14179: legacy format exported by older Arrow C++ versions
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(1, sizeof(int8_t)));
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(2, sizeof(int32_t)));
                } else {
                    TURBO_RETURN_NOT_OK(CheckNumBuffers(2));
                    TURBO_RETURN_NOT_OK(AllocateArrayData());
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(0, sizeof(int8_t)));
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(1, sizeof(int32_t)));
                    // Prepend a null bitmap pointer, as expected by DenseUnionArray
                    data_->buffers.insert(data_->buffers.begin(), nullptr);
                }
                return turbo::OkStatus();
            }

            turbo::Status Visit(const RunEndEncodedType &type) {
                TURBO_RETURN_NOT_OK(CheckNumChildren(2));
                TURBO_RETURN_NOT_OK(CheckNumBuffers(0));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                // Always have a null bitmap buffer as much of the code in arrow assumes
                // the buffers vector to have at least one entry on every array format.
                data_->buffers.emplace_back(nullptr);
                data_->null_count = 0;
                return turbo::OkStatus();
            }

            turbo::Status ImportFixedSizePrimitive(const FixedWidthType &type) {
                TURBO_RETURN_NOT_OK(CheckNoChildren());
                TURBO_RETURN_NOT_OK(CheckNumBuffers(2));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                if (bit_util::IsMultipleOf8(type.bit_width())) {
                    TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(1, type.bit_width() / 8));
                } else {
                    DKCHECK_EQ(type.bit_width(), 1);
                    TURBO_RETURN_NOT_OK(ImportBitsBuffer(1));
                }
                return turbo::OkStatus();
            }

            turbo::Status ImportBinaryView(const BinaryViewType &) {
                TURBO_RETURN_NOT_OK(CheckNoChildren());
                if (c_struct_->n_buffers < 3) {
                    return turbo::invalid_argument_error("Expected at least 3 buffers for imported type ",
                                                         type_->to_string(), ", ArrowArray struct has ",
                                                         c_struct_->n_buffers);
                }
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                TURBO_RETURN_NOT_OK(ImportFixedSizeBuffer(1, BinaryViewType::kSize));

                // The last C data buffer stores buffer sizes, and shouldn't be imported
                auto *buffer_sizes =
                        static_cast<const int64_t *>(c_struct_->buffers[c_struct_->n_buffers - 1]);

                for (int32_t buffer_id = 2; buffer_id < c_struct_->n_buffers - 1; ++buffer_id) {
                    TURBO_RETURN_NOT_OK(ImportBuffer(buffer_id, buffer_sizes[buffer_id - 2]));
                }
                data_->buffers.pop_back();
                return turbo::OkStatus();
            }

            template<typename StringType>
            turbo::Status ImportStringLike(const StringType &type) {
                TURBO_RETURN_NOT_OK(CheckNoChildren());
                TURBO_RETURN_NOT_OK(CheckNumBuffers(3));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                TURBO_RETURN_NOT_OK(ImportOffsetsBuffer<typename StringType::offset_type>(1));
                TURBO_RETURN_NOT_OK(ImportStringValuesBuffer<typename StringType::offset_type>(1, 2));
                return turbo::OkStatus();
            }

            template<typename ListType>
            turbo::Status ImportListLike(const ListType &type) {
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_RETURN_NOT_OK(CheckNumBuffers(2));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                TURBO_RETURN_NOT_OK(ImportOffsetsBuffer<typename ListType::offset_type>(1));
                return turbo::OkStatus();
            }

            template<typename ListViewType>
            turbo::Status ImportListView(const ListViewType &type) {
                using offset_type = typename ListViewType::offset_type;
                TURBO_RETURN_NOT_OK(CheckNumChildren(1));
                TURBO_RETURN_NOT_OK(CheckNumBuffers(3));
                TURBO_RETURN_NOT_OK(AllocateArrayData());
                TURBO_RETURN_NOT_OK(ImportNullBitmap());
                TURBO_RETURN_NOT_OK((ImportOffsetsBuffer<offset_type, /*with_extra_offset=*/false>(1)));
                TURBO_RETURN_NOT_OK(ImportSizesBuffer<offset_type>(2));
                return turbo::OkStatus();
            }

            turbo::Status CheckNoChildren() { return CheckNumChildren(0); }

            turbo::Status CheckNumChildren(int64_t n_children) {
                if (c_struct_->n_children != n_children) {
                    return turbo::invalid_argument_error("Expected ", n_children, " children for imported type ",
                                                         type_->to_string(), ", ArrowArray struct has ",
                                                         c_struct_->n_children);
                }
                return turbo::OkStatus();
            }

            turbo::Status CheckNumBuffers(int64_t n_buffers) {
                if (n_buffers != c_struct_->n_buffers) {
                    return turbo::invalid_argument_error("Expected ", n_buffers, " buffers for imported type ",
                                                         type_->to_string(), ", ArrowArray struct has ",
                                                         c_struct_->n_buffers);
                }
                return turbo::OkStatus();
            }

            turbo::Status CheckNoNulls() {
                if (c_struct_->null_count != 0) {
                    return turbo::invalid_argument_error("Unexpected non-zero null count for imported type ",
                                                         type_->to_string());
                }
                return turbo::OkStatus();
            }

            turbo::Status AllocateArrayData() {
                DKCHECK_EQ(data_, nullptr);
                data_ = std::make_shared<ArrayData>(type_, c_struct_->length, c_struct_->null_count,
                                                    c_struct_->offset);
                data_->buffers.resize(static_cast<size_t>(c_struct_->n_buffers));
                data_->child_data.resize(static_cast<size_t>(c_struct_->n_children));
                DKCHECK_EQ(child_importers_.size(), data_->child_data.size());
                std::transform(child_importers_.begin(), child_importers_.end(),
                               data_->child_data.begin(),
                               [](const ArrayImporter &child) { return child.data_; });
                return turbo::OkStatus();
            }

            turbo::Status ImportNullBitmap(int32_t buffer_id = 0) {
                TURBO_RETURN_NOT_OK(ImportBitsBuffer(buffer_id, /*is_null_bitmap=*/true));
                if (data_->null_count > 0 && data_->buffers[buffer_id] == nullptr) {
                    return turbo::invalid_argument_error(
                        "ArrowArray struct has null bitmap buffer but non-zero null_count ",
                        data_->null_count);
                }
                return turbo::OkStatus();
            }

            turbo::Status ImportBitsBuffer(int32_t buffer_id, bool is_null_bitmap = false) {
                // Compute visible size of buffer
                int64_t buffer_size =
                        (c_struct_->length > 0)
                            ? bit_util::BytesForBits(c_struct_->length + c_struct_->offset)
                            : 0;
                return ImportBuffer(buffer_id, buffer_size, is_null_bitmap);
            }

            turbo::Status ImportFixedSizeBuffer(int32_t buffer_id, int64_t byte_width) {
                // Compute visible size of buffer
                int64_t buffer_size = (c_struct_->length > 0)
                                          ? byte_width * (c_struct_->length + c_struct_->offset)
                                          : 0;
                return ImportBuffer(buffer_id, buffer_size);
            }

            template<typename OffsetType, bool with_extra_offset = true>
            turbo::Status ImportOffsetsBuffer(int32_t buffer_id) {
                // Compute visible size of buffer
                int64_t buffer_size = sizeof(OffsetType) * (c_struct_->length + c_struct_->offset +
                                                            (with_extra_offset ? 1 : 0));
                return ImportBuffer(buffer_id, buffer_size);
            }

            template<typename OffsetType>
            turbo::Status ImportSizesBuffer(int32_t buffer_id) {
                // Compute visible size of buffer
                int64_t buffer_size = sizeof(OffsetType) * (c_struct_->length + c_struct_->offset);
                return ImportBuffer(buffer_id, buffer_size);
            }

            template<typename OffsetType>
            turbo::Status ImportStringValuesBuffer(int32_t offsets_buffer_id, int32_t buffer_id,
                                                   int64_t byte_width = 1) {
                int64_t buffer_size = 0;
                if (c_struct_->length > 0) {
                    int64_t last_offset_value_offset =
                            (c_struct_->length + c_struct_->offset) * sizeof(OffsetType);
                    OffsetType last_offset_value;
                    TURBO_RETURN_NOT_OK(MemoryManager::CopyBufferSliceToCPU(
                        data_->buffers[offsets_buffer_id], last_offset_value_offset, sizeof(OffsetType),
                        reinterpret_cast<uint8_t*>(&last_offset_value)));
                    // Compute visible size of buffer
                    buffer_size = byte_width * last_offset_value;
                }

                return ImportBuffer(buffer_id, buffer_size);
            }

            turbo::Status ImportBuffer(int32_t buffer_id, int64_t buffer_size,
                                       bool is_null_bitmap = false) {
                std::shared_ptr<Buffer> *out = &data_->buffers[buffer_id];
                auto data = reinterpret_cast<const uint8_t *>(c_struct_->buffers[buffer_id]);
                if (data != nullptr) {
                    if (memory_mgr_) {
                        *out = std::make_shared<ImportedBuffer>(data, buffer_size, memory_mgr_,
                                                                device_type_, import_);
                    } else {
                        *out = std::make_shared<ImportedBuffer>(data, buffer_size, import_);
                    }
                } else if (is_null_bitmap) {
                    out->reset();
                } else {
                    // Ensure that imported buffers are never null (except for the null bitmap)
                    if (buffer_size != 0) {
                        return turbo::invalid_argument_error(
                            "ArrowArrayStruct contains null data pointer "
                            "for a buffer with non-zero computed size");
                    }
                    *out = zero_size_buffer_;
                }
                return turbo::OkStatus();
            }

            struct ArrowArray *c_struct_;
            int64_t recursion_level_;
            const std::shared_ptr<DataType> &type_;

            std::shared_ptr<ImportedArrayData> import_;
            std::shared_ptr<ArrayData> data_;
            std::vector<ArrayImporter> child_importers_;

            // For imported null buffer pointers
            std::shared_ptr<Buffer> zero_size_buffer_;

            std::shared_ptr<MemoryManager> memory_mgr_;
            DeviceAllocationType device_type_{DeviceAllocationType::kCPU};
        };
    } // namespace

    turbo::Result<std::shared_ptr<Array> > ImportArray(struct ArrowArray *array,
                                                       std::shared_ptr<DataType> type) {
        // Take ownership of the C struct, then wrap the imported data in an Array.
        ArrayImporter array_importer(type);
        TURBO_RETURN_NOT_OK(array_importer.Import(array));
        return array_importer.MakeArray();
    }

    turbo::Result<std::shared_ptr<Array> > ImportArray(struct ArrowArray *array,
                                                       struct ArrowSchema *type) {
        // If the type cannot be imported, the array must still be released:
        // ownership of both C structs was handed to us by the producer.
        auto imported_type = ImportType(type);
        if (!imported_type.ok()) {
            ArrowArrayRelease(array);
            return imported_type.status();
        }
        return ImportArray(array, *imported_type);
    }

    turbo::Result<std::shared_ptr<RecordBatch> > ImportRecordBatch(struct ArrowArray *array,
                                                                   std::shared_ptr<Schema> schema) {
        // A record batch travels through the C interface as a struct array
        // whose children are the batch's columns.
        auto struct_type = STRUCT(schema->fields());
        ArrayImporter batch_importer(struct_type);
        TURBO_RETURN_NOT_OK(batch_importer.Import(array));
        return batch_importer.MakeRecordBatch(std::move(schema));
    }

    turbo::Result<std::shared_ptr<RecordBatch> > ImportRecordBatch(struct ArrowArray *array,
                                                                   struct ArrowSchema *schema) {
        // On schema import failure, release the array we were given ownership of.
        auto imported_schema = ImportSchema(schema);
        if (!imported_schema.ok()) {
            ArrowArrayRelease(array);
            return imported_schema.status();
        }
        return ImportRecordBatch(array, *imported_schema);
    }

    turbo::Result<std::shared_ptr<MemoryManager> > DefaultDeviceMemoryMapper(
        ArrowDeviceType device_type, int64_t device_id) {
        // Resolve the registered mapper for this device kind, then ask it for
        // the memory manager of the given device id.
        auto maybe_mapper = GetDeviceMapper(static_cast<DeviceAllocationType>(device_type));
        if (!maybe_mapper.ok()) {
            return maybe_mapper.status();
        }
        return (*maybe_mapper)(device_id);
    }

    turbo::Result<std::shared_ptr<Array> > ImportDeviceArray(struct ArrowDeviceArray *array,
                                                             std::shared_ptr<DataType> type,
                                                             const DeviceMemoryMapper &mapper) {
        // The mapper resolves (device_type, device_id) to a MemoryManager for
        // wrapping the device-resident buffers.
        ArrayImporter device_importer(type);
        TURBO_RETURN_NOT_OK(device_importer.Import(array, mapper));
        return device_importer.MakeArray();
    }

    turbo::Result<std::shared_ptr<Array> > ImportDeviceArray(struct ArrowDeviceArray *array,
                                                             struct ArrowSchema *type,
                                                             const DeviceMemoryMapper &mapper) {
        // On type import failure, release the inner ArrowArray we own.
        auto imported_type = ImportType(type);
        if (!imported_type.ok()) {
            ArrowArrayRelease(&array->array);
            return imported_type.status();
        }
        return ImportDeviceArray(array, *imported_type, mapper);
    }

    turbo::Result<std::shared_ptr<RecordBatch> > ImportDeviceRecordBatch(
        struct ArrowDeviceArray *array, std::shared_ptr<Schema> schema,
        const DeviceMemoryMapper &mapper) {
        // As on the CPU path, the batch arrives as a struct array whose
        // children are the columns.
        auto struct_type = STRUCT(schema->fields());
        ArrayImporter device_importer(struct_type);
        TURBO_RETURN_NOT_OK(device_importer.Import(array, mapper));
        return device_importer.MakeRecordBatch(std::move(schema));
    }

    turbo::Result<std::shared_ptr<RecordBatch> > ImportDeviceRecordBatch(
        struct ArrowDeviceArray *array, struct ArrowSchema *schema,
        const DeviceMemoryMapper &mapper) {
        // On schema import failure, release the inner ArrowArray we own.
        auto imported_schema = ImportSchema(schema);
        if (!imported_schema.ok()) {
            ArrowArrayRelease(&array->array);
            return imported_schema.status();
        }
        return ImportDeviceRecordBatch(array, *imported_schema, mapper);
    }

    //////////////////////////////////////////////////////////////////////////
    // C stream export

    namespace {
        // A record batch stream advertises the reader's full schema.
        turbo::Status ExportStreamSchema(const std::shared_ptr<RecordBatchReader> &src,
                                         struct ArrowSchema *out_schema) {
            const auto &reader_schema = src->schema();
            return ExportSchema(*reader_schema, out_schema);
        }

        // A chunked-array stream advertises the element type of its chunks.
        turbo::Status ExportStreamSchema(const std::shared_ptr<ChunkedArray> &src,
                                         struct ArrowSchema *out_schema) {
            const auto &value_type = src->type();
            return ExportType(*value_type, out_schema);
        }

        // Pull the next batch from the reader; a null batch signals
        // end-of-stream, which the C protocol encodes as a released ArrowArray.
        turbo::Status ExportStreamNext(const std::shared_ptr<RecordBatchReader> &src, int64_t i,
                                       struct ArrowArray *out_array) {
            std::shared_ptr<RecordBatch> next_batch;
            TURBO_RETURN_NOT_OK(src->read_next(&next_batch));
            if (next_batch != nullptr) {
                return ExportRecordBatch(*next_batch, out_array);
            }
            // End of stream
            ArrowArrayMarkReleased(out_array);
            return turbo::OkStatus();
        }

        // The int64_t parameter is unused for reader-backed streams; it only
        // exists so this overload set matches the ChunkedArray variants and
        // the templated exporters can call either uniformly.
        turbo::Status ExportStreamNext(const std::shared_ptr<RecordBatchReader> &src, int64_t i,
                                       struct ArrowDeviceArray *out_array) {
            std::shared_ptr<RecordBatch> next_batch;
            TURBO_RETURN_NOT_OK(src->read_next(&next_batch));
            if (next_batch != nullptr) {
                // Propagate the batch's sync event alongside the device buffers.
                return ExportDeviceRecordBatch(*next_batch, next_batch->GetSyncEvent(), out_array);
            }
            // End of stream
            ArrowArrayMarkReleased(&out_array->array);
            return turbo::OkStatus();
        }

        // Chunks are streamed one per call; past the last chunk, end-of-stream
        // is signalled by marking the output released.
        turbo::Status ExportStreamNext(const std::shared_ptr<ChunkedArray> &src, int64_t i,
                                       struct ArrowArray *out_array) {
            if (i < src->num_chunks()) {
                return ExportArray(*src->chunk(static_cast<int>(i)), out_array);
            }
            // End of stream
            ArrowArrayMarkReleased(out_array);
            return turbo::OkStatus();
        }

        // Device-array variant of the chunked stream: no sync event is
        // attached (nullptr), and end-of-stream releases the inner array.
        turbo::Status ExportStreamNext(const std::shared_ptr<ChunkedArray> &src, int64_t i,
                                       struct ArrowDeviceArray *out_array) {
            if (i < src->num_chunks()) {
                return ExportDeviceArray(*src->chunk(static_cast<int>(i)), nullptr, out_array);
            }
            // End of stream
            ArrowArrayMarkReleased(&out_array->array);
            return turbo::OkStatus();
        }

        template<typename T, bool IsDevice>
        class ExportedArrayStream {
            using StreamTraits =
            std::conditional_t<IsDevice, internal::ArrayDeviceStreamExportTraits,
                internal::ArrayStreamExportTraits>;
            using StreamType = typename StreamTraits::CType;
            using ArrayTraits = std::conditional_t<IsDevice, internal::ArrayDeviceExportTraits,
                internal::ArrayExportTraits>;
            using ArrayType = typename ArrayTraits::CType;

        public:
            struct PrivateData {
                explicit PrivateData(std::shared_ptr<T> reader)
                    : reader_(std::move(reader)), batch_num_(0) {
                }

                std::shared_ptr<T> reader_;
                int64_t batch_num_;
                std::string last_error_;

                PrivateData() = default;

                TURBO_DISALLOW_COPY_AND_ASSIGN(PrivateData);
            };

            explicit ExportedArrayStream(StreamType *stream) : stream_(stream) {
            }

            turbo::Status GetSchema(struct ArrowSchema *out_schema) {
                return ExportStreamSchema(reader(), out_schema);
            }

            turbo::Status GetNext(ArrayType *out_array) {
                return ExportStreamNext(reader(), next_batch_num(), out_array);
            }

            const char *GetLastError() {
                const auto &last_error = private_data()->last_error_;
                return last_error.empty() ? nullptr : last_error.c_str();
            }

            void Release() {
                if (StreamTraits::IsReleasedFunc(stream_)) {
                    return;
                }

                DKCHECK_NE(private_data(), nullptr);
                delete private_data();

                StreamTraits::MarkReleased(stream_);
            }

            // C-compatible callbacks

            static int StaticGetSchema(StreamType *stream, struct ArrowSchema *out_schema) {
                ExportedArrayStream self{stream};
                return self.ToCError(self.GetSchema(out_schema));
            }

            static int StaticGetNext(StreamType *stream, ArrayType *out_array) {
                ExportedArrayStream self{stream};
                return self.ToCError(self.GetNext(out_array));
            }

            static void StaticRelease(StreamType *stream) { ExportedArrayStream{stream}.Release(); }

            static const char *StaticGetLastError(StreamType *stream) {
                return ExportedArrayStream{stream}.GetLastError();
            }

            static turbo::Status Make(std::shared_ptr<T> reader, StreamType *out) {
                out->get_schema = ExportedArrayStream::StaticGetSchema;
                out->get_next = ExportedArrayStream::StaticGetNext;
                out->get_last_error = ExportedArrayStream::StaticGetLastError;
                out->release = ExportedArrayStream::StaticRelease;
                out->private_data = new ExportedArrayStream::PrivateData{std::move(reader)};
                return turbo::OkStatus();
            }

        private:
            // Translate a turbo::Status into the errno-style code required by
            // the Arrow C stream interface, recording the message so that
            // get_last_error() can surface it (or clearing it on success).
            int ToCError(const turbo::Status &status) {
                if (TURBO_LIKELY(status.ok())) {
                    private_data()->last_error_.clear();
                    return 0;
                }
                private_data()->last_error_ = status.to_string();
                const turbo::StatusCode code = status.code();
                if (code == turbo::StatusCode::kIOError) {
                    return EIO;
                }
                if (code == turbo::StatusCode::kUnimplemented) {
                    return ENOSYS;
                }
                if (code == turbo::StatusCode::kResourceExhausted) {
                    return ENOMEM;
                }
                return EINVAL; // anything else: invalid argument, type error, ...
            }

            // `private_data` is a void* in the C ABI struct; it was allocated
            // as a PrivateData instance in Make(), so static_cast is sufficient.
            PrivateData *private_data() {
                return static_cast<PrivateData *>(stream_->private_data);
            }

            const std::shared_ptr<T> &reader() { return private_data()->reader_; }

            int64_t next_batch_num() { return private_data()->batch_num_++; }

            StreamType *stream_;
        };
    } // namespace

    // Export `reader` as a C ArrowArrayStream.  The stream takes shared
    // ownership of the reader; `out` is fully overwritten.
    turbo::Status ExportRecordBatchReader(std::shared_ptr<RecordBatchReader> reader,
                                          struct ArrowArrayStream *out) {
        // Reject a null reader up front: the exported callbacks would
        // otherwise dereference a null shared_ptr on the first get_schema /
        // get_next call from the consumer.
        if (reader == nullptr) {
            return turbo::invalid_argument_error("Cannot export null RecordBatchReader");
        }
        memset(out, 0, sizeof(struct ArrowArrayStream));
        return ExportedArrayStream<RecordBatchReader, false>::Make(std::move(reader), out);
    }

    // Export `chunked_array` as a C ArrowArrayStream producing one array per
    // chunk.  The stream takes shared ownership; `out` is fully overwritten.
    turbo::Status ExportChunkedArray(std::shared_ptr<ChunkedArray> chunked_array,
                                     struct ArrowArrayStream *out) {
        // Reject a null input up front: the exported callbacks would
        // otherwise dereference a null shared_ptr on first use.
        if (chunked_array == nullptr) {
            return turbo::invalid_argument_error("Cannot export null ChunkedArray");
        }
        memset(out, 0, sizeof(struct ArrowArrayStream));
        return ExportedArrayStream<ChunkedArray, false>::Make(std::move(chunked_array), out);
    }

    // Export `reader` as a C ArrowDeviceArrayStream, advertising the reader's
    // device type in the C struct.  The stream takes shared ownership of the
    // reader; `out` is fully overwritten.
    turbo::Status ExportDeviceRecordBatchReader(std::shared_ptr<RecordBatchReader> reader,
                                                struct ArrowDeviceArrayStream *out) {
        // `reader` is dereferenced below for its device type, so a null
        // reader must be rejected here instead of crashing.
        if (reader == nullptr) {
            return turbo::invalid_argument_error("Cannot export null RecordBatchReader");
        }
        memset(out, 0, sizeof(struct ArrowDeviceArrayStream));
        out->device_type = static_cast<ArrowDeviceType>(reader->device_type());
        return ExportedArrayStream<RecordBatchReader, true>::Make(std::move(reader), out);
    }

    // Export `chunked_array` as a C ArrowDeviceArrayStream, advertising the
    // caller-supplied `device_type`.  The stream takes shared ownership;
    // `out` is fully overwritten.
    turbo::Status ExportDeviceChunkedArray(std::shared_ptr<ChunkedArray> chunked_array,
                                           DeviceAllocationType device_type,
                                           struct ArrowDeviceArrayStream *out) {
        // Reject a null input up front: the exported callbacks would
        // otherwise dereference a null shared_ptr on first use.
        if (chunked_array == nullptr) {
            return turbo::invalid_argument_error("Cannot export null ChunkedArray");
        }
        memset(out, 0, sizeof(struct ArrowDeviceArrayStream));
        out->device_type = static_cast<ArrowDeviceType>(device_type);
        return ExportedArrayStream<ChunkedArray, true>::Make(std::move(chunked_array), out);
    }

    //////////////////////////////////////////////////////////////////////////
    // C stream import

    namespace {
        // Common base class for importing a C ArrowArrayStream (or, when
        // IsDevice is true, an ArrowDeviceArrayStream).  The C stream is moved
        // into this object at construction and released at destruction (RAII),
        // so a half-consumed stream is never leaked.
        template<bool IsDevice>
        class ArrayStreamReader {
        protected:
            using StreamTraits =
            std::conditional_t<IsDevice, internal::ArrayDeviceStreamExportTraits,
                internal::ArrayStreamExportTraits>;
            using StreamType = typename StreamTraits::CType;
            using ArrayTraits = std::conditional_t<IsDevice, internal::ArrayDeviceExportTraits,
                internal::ArrayExportTraits>;
            using ArrayType = typename ArrayTraits::CType;

        public:
            // Takes ownership of `stream` (the caller's struct is marked
            // released by MoveFunc).  `mapper` maps foreign device buffers on
            // import; it is only consulted by the device overloads below.
            // NOTE: `mapper` is a non-const value parameter so the std::move
            // below is a real move (a `const` value parameter would silently
            // copy).
            explicit ArrayStreamReader(StreamType *stream,
                                       DeviceMemoryMapper mapper = DefaultDeviceMemoryMapper)
                : mapper_{std::move(mapper)} {
                StreamTraits::MoveFunc(stream, &stream_);
                DKCHECK(!StreamTraits::IsReleasedFunc(&stream_));
            }

            ~ArrayStreamReader() { ReleaseStream(); }

            // Release the underlying C stream.  Safe to call repeatedly.
            void ReleaseStream() {
                // all our trait release funcs check IsReleased so we don't
                // need to repeat it here
                StreamTraits::ReleaseFunc(&stream_);
                DKCHECK(StreamTraits::IsReleasedFunc(&stream_));
            }

        protected:
            // Pull the next array from the C stream into `array`.  On failure,
            // release whatever the producer may have placed in `array`.
            turbo::Status ReadNextArrayInternal(ArrayType *array) {
                ArrayTraits::MarkReleased(array);
                turbo::Status status = StatusFromCError(stream_.get_next(&stream_, array));
                if (!status.ok()) {
                    ArrayTraits::ReleaseFunc(array);
                }

                return status;
            }

            // CPU overload: plain record batch import.
            turbo::Result<std::shared_ptr<RecordBatch> > ImportRecordBatchInternal(
                struct ArrowArray *array, std::shared_ptr<Schema> schema) {
                return ImportRecordBatch(array, schema);
            }

            // Device overload: import through the configured memory mapper.
            turbo::Result<std::shared_ptr<RecordBatch> > ImportRecordBatchInternal(
                struct ArrowDeviceArray *array, std::shared_ptr<Schema> schema) {
                return ImportDeviceRecordBatch(array, schema, mapper_);
            }

            // CPU overload: plain array import.
            turbo::Result<std::shared_ptr<Array> > ImportArrayInternal(
                struct ArrowArray *array, std::shared_ptr<nebula::DataType> type) {
                return ImportArray(array, type);
            }

            // Device overload: import through the configured memory mapper.
            turbo::Result<std::shared_ptr<Array> > ImportArrayInternal(
                struct ArrowDeviceArray *array, std::shared_ptr<nebula::DataType> type) {
                return ImportDeviceArray(array, type, mapper_);
            }

            // Fetch the stream's schema and import it as a full Schema
            // (record-batch streams).
            turbo::Result<std::shared_ptr<Schema> > ReadSchema() {
                struct ArrowSchema c_schema = {};
                TURBO_RETURN_NOT_OK(
                    StatusFromCError(&stream_, stream_.get_schema(&stream_, &c_schema)));
                TURBO_MOVE_OR_RAISE(auto schema, ImportSchema(&c_schema));
                return schema;
            }

            // Fetch the stream's schema and import it as a single Field
            // (array-chunk streams).
            turbo::Result<std::shared_ptr<Field> > ReadField() {
                struct ArrowSchema c_schema = {};
                TURBO_RETURN_NOT_OK(
                    StatusFromCError(&stream_, stream_.get_schema(&stream_, &c_schema)));
                TURBO_MOVE_OR_RAISE(auto field, ImportField(&c_schema));
                return field;
            }

            // Error out instead of invoking callbacks on a released stream.
            turbo::Status CheckNotReleased() {
                if (StreamTraits::IsReleasedFunc(&stream_)) {
                    return turbo::invalid_argument_error(
                        "Attempt to read from a stream that has already been closed");
                }

                return turbo::OkStatus();
            }

            turbo::Status StatusFromCError(int errno_like) const {
                return StatusFromCError(&stream_, errno_like);
            }

            // Map an errno-style code returned by a C stream callback to a
            // turbo::Status, attaching the producer's last error message.
            static turbo::Status StatusFromCError(StreamType *stream, int errno_like) {
                if (TURBO_LIKELY(errno_like == 0)) {
                    return turbo::OkStatus();
                }
                turbo::StatusCode code;
                switch (errno_like) {
                    case EDOM:
                    case EINVAL:
                    case ERANGE:
                        code = turbo::StatusCode::kInvalidArgument;
                        break;
                    case ENOMEM:
                        code = turbo::StatusCode::kResourceExhausted;
                        break;
                    case ENOSYS:
                        code = turbo::StatusCode::kUnimplemented;
                        // BUGFIX: this `break` was missing, so ENOSYS fell
                        // through to the default and was reported as kIOError.
                        break;
                    default:
                        code = turbo::StatusCode::kIOError;
                        break;
                }
                const char *last_error = stream->get_last_error(stream);
                return {code, last_error ? std::string(last_error) : ""};
            }

            // Device streams advertise a device type; plain CPU streams are
            // implicitly kCPU.
            DeviceAllocationType get_device_type() const {
                if constexpr (IsDevice) {
                    return static_cast<DeviceAllocationType>(stream_.device_type);
                } else {
                    return DeviceAllocationType::kCPU;
                }
            }

        private:
            mutable StreamType stream_;
            const DeviceMemoryMapper mapper_;
        };

        // Adapts an imported C array stream into a nebula::RecordBatchReader:
        // the schema is read once at Init(), then each read_next() pulls one
        // C array and imports it as a RecordBatch.
        template<bool IsDevice>
        class ArrayStreamBatchReader : public RecordBatchReader,
                                       public ArrayStreamReader<IsDevice> {
            using StreamTraits =
            std::conditional_t<IsDevice, internal::ArrayDeviceStreamExportTraits,
                internal::ArrayStreamExportTraits>;
            using StreamType = typename StreamTraits::CType;
            using ArrayTraits = std::conditional_t<IsDevice, internal::ArrayDeviceExportTraits,
                internal::ArrayExportTraits>;
            using ArrayType = typename ArrayTraits::CType;

        public:
            explicit ArrayStreamBatchReader(
                StreamType *stream, const DeviceMemoryMapper &mapper = DefaultDeviceMemoryMapper)
                : ArrayStreamReader<IsDevice>(stream, mapper) {
            }

            // Must be called once after construction; caches the schema.
            turbo::Status Init() {
                TURBO_MOVE_OR_RAISE(schema_, this->ReadSchema());
                return turbo::OkStatus();
            }

            std::shared_ptr<Schema> schema() const override { return schema_; }

            // Pull one batch from the C stream.  A C array that comes back in
            // the released state signals end-of-stream, reported as a null batch.
            turbo::Status read_next(std::shared_ptr<RecordBatch> *batch) override {
                TURBO_RETURN_NOT_OK(this->CheckNotReleased());

                ArrayType c_batch;
                TURBO_RETURN_NOT_OK(this->ReadNextArrayInternal(&c_batch));

                if (!ArrayTraits::IsReleasedFunc(&c_batch)) {
                    return this->ImportRecordBatchInternal(&c_batch, schema_).try_value(batch);
                }
                // End of stream
                batch->reset();
                return turbo::OkStatus();
            }

            // Eagerly releases the underlying C stream.
            turbo::Status close() override {
                this->ReleaseStream();
                return turbo::OkStatus();
            }

            DeviceAllocationType device_type() const override { return this->get_device_type(); }

        private:
            std::shared_ptr<Schema> schema_;
        };

        // Reads individual Array chunks from an imported C stream whose schema
        // describes a single field (used to assemble a ChunkedArray).
        template<bool IsDevice>
        class ArrayStreamArrayReader : public ArrayStreamReader<IsDevice> {
            using StreamTraits =
            std::conditional_t<IsDevice, internal::ArrayDeviceStreamExportTraits,
                internal::ArrayStreamExportTraits>;
            using StreamType = typename StreamTraits::CType;
            using ArrayTraits = std::conditional_t<IsDevice, internal::ArrayDeviceExportTraits,
                internal::ArrayExportTraits>;
            using ArrayType = typename ArrayTraits::CType;

        public:
            explicit ArrayStreamArrayReader(
                StreamType *stream, const DeviceMemoryMapper &mapper = DefaultDeviceMemoryMapper)
                : ArrayStreamReader<IsDevice>(stream, mapper) {
            }

            // Must be called once after construction; caches the field.
            turbo::Status Init() {
                TURBO_MOVE_OR_RAISE(field_, this->ReadField());
                return turbo::OkStatus();
            }

            std::shared_ptr<DataType> data_type() const { return field_->type(); }

            // Pull one chunk from the C stream.  A C array that comes back in
            // the released state signals end-of-stream, reported as a null array.
            turbo::Status read_next(std::shared_ptr<Array> *array) {
                TURBO_RETURN_NOT_OK(this->CheckNotReleased());

                ArrayType c_chunk;
                TURBO_RETURN_NOT_OK(this->ReadNextArrayInternal(&c_chunk));

                if (!ArrayTraits::IsReleasedFunc(&c_chunk)) {
                    return this->ImportArrayInternal(&c_chunk, field_->type()).try_value(array);
                }
                // End of stream
                array->reset();
                return turbo::OkStatus();
            }

        private:
            std::shared_ptr<Field> field_;
        };

        // Shared implementation of the record-batch-reader import entry points:
        // wraps a live C stream in an ArrayStreamBatchReader and initializes it.
        template<bool IsDevice, typename StreamTraits = std::conditional_t<
            IsDevice, internal::ArrayDeviceStreamExportTraits,
            internal::ArrayStreamExportTraits> >
        turbo::Result<std::shared_ptr<RecordBatchReader> > ImportReader(
            typename StreamTraits::CType *stream,
            const DeviceMemoryMapper &mapper = DefaultDeviceMemoryMapper) {
            // A released stream has no callbacks left to call.
            if (StreamTraits::IsReleasedFunc(stream)) {
                return turbo::invalid_argument_error("Cannot import released Arrow Stream");
            }
            auto batch_reader = std::make_shared<ArrayStreamBatchReader<IsDevice> >(stream, mapper);
            TURBO_RETURN_NOT_OK(batch_reader->Init());
            return batch_reader;
        }

        // Shared implementation of the chunked-array import entry points:
        // drains every chunk from the C stream and assembles a ChunkedArray.
        template<bool IsDevice, typename StreamTraits = std::conditional_t<
            IsDevice, internal::ArrayDeviceStreamExportTraits,
            internal::ArrayStreamExportTraits> >
        turbo::Result<std::shared_ptr<ChunkedArray> > ImportChunked(
            typename StreamTraits::CType *stream,
            const DeviceMemoryMapper &mapper = DefaultDeviceMemoryMapper) {
            // A released stream has no callbacks left to call.
            if (StreamTraits::IsReleasedFunc(stream)) {
                return turbo::invalid_argument_error("Cannot import released Arrow Stream");
            }

            auto chunk_reader = std::make_shared<ArrayStreamArrayReader<IsDevice> >(stream, mapper);
            TURBO_RETURN_NOT_OK(chunk_reader->Init());
            auto data_type = chunk_reader->data_type();

            // Drain the stream; a null chunk signals end-of-stream.
            ArrayVector chunks;
            for (;;) {
                std::shared_ptr<Array> next_chunk;
                TURBO_RETURN_NOT_OK(chunk_reader->read_next(&next_chunk));
                if (!next_chunk) {
                    break;
                }
                chunks.push_back(std::move(next_chunk));
            }

            // Eagerly release the C stream before building the result.
            chunk_reader->ReleaseStream();
            return ChunkedArray::create(std::move(chunks), std::move(data_type));
        }
    } // namespace

    // Import a C ArrowArrayStream as a RecordBatchReader.  The stream is moved
    // into the returned reader and released when the reader is destroyed or
    // closed; fails if the stream was already released.
    turbo::Result<std::shared_ptr<RecordBatchReader> > ImportRecordBatchReader(
        struct ArrowArrayStream *stream) {
        return ImportReader</*IsDevice=*/false>(stream);
    }

    // Import a C ArrowDeviceArrayStream as a RecordBatchReader, using `mapper`
    // to translate device buffers.  The stream is moved into the returned
    // reader; fails if the stream was already released.
    turbo::Result<std::shared_ptr<RecordBatchReader> > ImportDeviceRecordBatchReader(
        struct ArrowDeviceArrayStream *stream, const DeviceMemoryMapper &mapper) {
        return ImportReader</*IsDevice=*/true>(stream, mapper);
    }

    // Import a C ArrowArrayStream as a ChunkedArray by draining all of its
    // chunks.  The stream is consumed and released before returning; fails if
    // the stream was already released.
    turbo::Result<std::shared_ptr<ChunkedArray> > ImportChunkedArray(
        struct ArrowArrayStream *stream) {
        return ImportChunked</*IsDevice=*/false>(stream);
    }

    // Import a C ArrowDeviceArrayStream as a ChunkedArray, using `mapper` to
    // translate device buffers.  The stream is consumed and released before
    // returning; fails if the stream was already released.
    turbo::Result<std::shared_ptr<ChunkedArray> > ImportDeviceChunkedArray(
        struct ArrowDeviceArrayStream *stream, const DeviceMemoryMapper &mapper) {
        return ImportChunked</*IsDevice=*/true>(stream, mapper);
    }
} // namespace nebula
