#ifndef TNN_CORE_H
#define TNN_CORE_H

#include <cstddef>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

namespace TNN_NS {

// Status codes returned by TNN APIs.
enum StatusCode {
    TNN_OK = 0,     // success
    TNN_ERROR = -1  // generic failure
};

// Target device on which a network runs.
enum DeviceType {
    DEVICE_ARM = 0,  // ARM CPU backend
    DEVICE_GPU = 1   // GPU backend
};

// Supported serialized model formats.
enum ModelType {
    MODEL_TYPE_TNN = 0  // native TNN model format
};

// Requested compute precision. Exact numeric mapping (e.g. fp32 vs fp16)
// is backend-defined -- not visible in this header.
enum PrecisionType {
    PRECISION_AUTO = 0,  // let the backend choose
    PRECISION_HIGH = 1,  // prefer higher accuracy
    PRECISION_LOW = 2    // prefer lower precision / higher speed
};

// Status: a result code paired with an optional human-readable description.
class Status {
public:
    // Implicit construction from a StatusCode keeps `return TNN_OK;` valid.
    Status(StatusCode code = TNN_OK, const std::string& desc = "")
        : code_(code), description_(desc) {}

    // Accessors.
    StatusCode code() const { return code_; }
    std::string description() const { return description_; }

    // Allow comparing a Status directly against a StatusCode,
    // e.g. `if (status == TNN_OK)`. Inequality is defined via equality.
    bool operator==(StatusCode code) const { return code == code_; }
    bool operator!=(StatusCode code) const { return !(*this == code); }

private:
    StatusCode code_;          // numeric result code
    std::string description_;  // optional detail text
};

// A thin value wrapper around the list of dimension extents of a tensor.
struct DimsVector {
    std::vector<int> dims;  // dimension extents

    DimsVector() = default;

    // Implicit conversion from a plain extent vector (kept implicit for
    // call-site convenience, matching the rest of this header).
    DimsVector(const std::vector<int>& values) : dims(values) {}

    // Number of dimensions.
    size_t size() const { return dims.size(); }

    // Unchecked element access; forwards straight to the vector.
    int& operator[](size_t i) { return dims[i]; }
    const int& operator[](size_t i) const { return dims[i]; }
};

// Describes a blob: its dimensions and its name.
struct BlobDesc {
    DimsVector dims;   // tensor dimension extents
    std::string name;  // blob identifier used as the BlobMap key

    BlobDesc() = default;
    // Fix: the initializer list now matches declaration order (dims, then
    // name). The original wrote `name(n), dims(d)`, which still initializes
    // dims first and triggers -Wreorder.
    BlobDesc(const std::string& n, const DimsVector& d) : dims(d), name(n) {}
};

// Raw, non-owning view of a blob's backing memory.
struct BlobHandle {
    void* base = nullptr;     // start of the underlying buffer (not owned)
    size_t bytes_offset = 0;  // byte offset into base; exact use is backend-defined
};

// Blob类
class Blob {
public:
    Blob(const BlobDesc& desc) : desc_(desc) {
        // 计算数据大小
        size_t total_size = 1;
        for (int dim : desc.dims.dims) {
            total_size *= dim;
        }
        data_.resize(total_size * sizeof(float));
        handle_.base = data_.data();
    }
    
    BlobDesc GetBlobDesc() const { return desc_; }
    BlobHandle GetHandle() const { return handle_; }

private:
    BlobDesc desc_;
    BlobHandle handle_;
    std::vector<uint8_t> data_;
};

// Maps blob name -> shared blob instance.
using BlobMap = std::map<std::string, std::shared_ptr<Blob>>;

// Configuration describing the model to load.
struct ModelConfig {
    ModelType model_type = MODEL_TYPE_TNN;  // serialized format of the model
    std::vector<std::string> params;        // model payload/paths; interpretation
                                            // depends on model_type (not visible here)
};

// Configuration for instantiating a runnable network.
struct NetworkConfig {
    DeviceType device_type = DEVICE_ARM;      // device to run on
    PrecisionType precision = PRECISION_AUTO; // requested compute precision
    std::string library_path;                 // optional backend library path --
                                              // presumably GPU kernels; confirm in impl
};

// Forward declaration
class Instance;

// Entry point of the TNN runtime: holds a parsed model and creates
// executable instances from it. Method bodies are defined elsewhere.
class TNN {
public:
    TNN() = default;
    virtual ~TNN() = default;
    
    // Initializes the runtime with the given model configuration.
    // Returns a Status convertible-comparable to TNN_OK on success.
    virtual Status Init(const ModelConfig& config);
    
    // Creates an executable Instance for the given network configuration.
    // `status` receives the result of the attempt; on failure the returned
    // pointer is presumably null -- confirm against the implementation.
    virtual std::shared_ptr<Instance> CreateInst(const NetworkConfig& config, Status& status);

private:
    ModelConfig model_config_;  // model configuration -- presumably stored by
                                // Init(); definition not visible in this header
};

// An executable network instance, created via TNN::CreateInst.
// Method bodies are defined elsewhere.
class Instance {
public:
    Instance() = default;
    virtual ~Instance() = default;
    
    // Fills `blobs` with all input blobs, keyed by blob name.
    virtual Status GetAllInputBlobs(BlobMap& blobs);
    
    // Fills `blobs` with all output blobs, keyed by blob name.
    virtual Status GetAllOutputBlobs(BlobMap& blobs);
    
    // Runs one forward inference pass.
    virtual Status Forward();

private:
    BlobMap input_blobs_;   // name -> input blob
    BlobMap output_blobs_;  // name -> output blob
    bool initialized_ = false;  // lazy-init flag -- presumably set by
                                // InitializeBlobs(); body not visible here
    
    // Prepares input/output blob maps -- defined elsewhere; confirm semantics.
    void InitializeBlobs();
};

} // namespace TNN_NS

#endif // TNN_CORE_H
