/*
 * Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "tensorflow/core/framework/common_shape_fns.h"
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/framework/shape_inference.h"
#include "tensorflow/core/framework/tensor.pb.h"

namespace tensorflow {
using shape_inference::DimensionHandle;
using shape_inference::InferenceContext;
using shape_inference::ShapeHandle;

// Initializes the partition map that shards embedding tables across
// parameter-server instances.
//   ps_num: number of parameter servers.
//   ps_ids: ids of the parameter servers.
// partition_num defaults to 65537 — presumably a prime chosen for key
// hashing/partitioning; confirm against the kernel implementation.
REGISTER_OP("InitPartitionMap")
  .Input("ps_num: int32")
  .Input("ps_ids: int32")
  .Attr("partition_num: int = 65537")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Initializes the server-side hashmap backing one embedding table.
// Initializer attrs (constant_value / min,max / mu,sigma, seeds) select the
// parameters for the mode named in initializer_mode; only the ones matching
// that mode are expected to be meaningful.
REGISTER_OP("InitEmbeddingHashmap")
  .Input("table_id: int32")
  .Attr("bucket_size: int = 0")
  .Attr("value_total_len: int = 0")            // total value slots per key (embedding + optimizer state)
  .Attr("dtype: {uint8, uint16, float32} = DT_FLOAT")
  .Attr("embedding_dim: int = 0")
  .Attr("initializer_mode: string = '' ")
  .Attr("constant_value: float = 0")           // used by constant initializer
  .Attr("min: float = -2")                     // used by uniform initializer
  .Attr("max: float = 2")
  .Attr("mu: float = 0")                       // used by normal initializer
  .Attr("sigma: float = 1")
  .Attr("seed: int = 0")
  .Attr("seed2: int = 0")
  .Attr("filter_mode: string = 'no_filter' ")
  .Attr("optimizer_mode: string = '' ")
  .Attr("optimizer_params: list(float) = [0.1]")
  .SetShapeFn(shape_inference::NoOutputs);     // Side-effect only; no outputs.

// Imports embedding table contents from files on disk.
// embedding_dim / value_total_len / table_name are parallel lists, one entry
// per table being imported (list lengths presumably must match — verify in
// the kernel).
REGISTER_OP("EmbeddingTableImport")
  .Input("file_path: string")
  .Input("ps_id: int32")
  .Input("table_id: int32")
  .Input("global_step: int64")
  .Attr("embedding_dim: list(int)")
  .Attr("value_total_len: list(int)")
  .Attr("only_var_flag: bool = false")   // true: import only variable data, not optimizer state
  .Attr("file_type: string = 'bin' ")
  .Attr("table_name: list(string)")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Looks up embeddings for the given keys in a server-side table.
// Output shape is [num_keys, embedding_dim[0]].
REGISTER_OP("EmbeddingTableFind")
  .Input("table_id: int32")
  .Input("keys: int64")
  .Output("values: float32")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("default_value: list(float) = [-1]")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // keys must be a rank-1 tensor of ids.
    ShapeHandle keys_shape;
    TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &keys_shape));
    std::vector<int32_t> embedding_dims;
    // Reject a missing attr AND an explicitly empty list: indexing
    // embedding_dims[0] on an empty vector is undefined behavior.
    if (!c->GetAttr("embedding_dim", &embedding_dims).ok() || embedding_dims.empty()) {
        return errors::InvalidArgument("Invalid embedding_dim");
    }
    c->set_output(0, c->Matrix(c->Dim(keys_shape, 0), embedding_dims[0]));
    return Status::OK();
  });

// Tears down the partition map created by InitPartitionMap.
REGISTER_OP("UninitPartitionMap")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Tears down the hashmap created by InitEmbeddingHashmap for one table.
REGISTER_OP("UninitEmbeddingHashmap")
  .Input("table_id: int32")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Wraps a table id into a resource handle so downstream ops can take a
// resource input.
REGISTER_OP("TableToResource")
  .Input("table_id: int32")
  .Output("table_handle: resource")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Handle shape mirrors the table_id input's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Looks up embeddings for the given keys, initializing any missing entries
// according to the initializer/filter/optimizer attrs (parallel lists, one
// entry per table). Output shape is [num_keys, embedding_dim[0]].
REGISTER_OP("EmbeddingTableFindAndInit")
  .Input("table_id: int32")
  .Input("keys: int64")
  .Output("values: float32")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("value_total_len: list(int) = [0]")
  .Attr("initializer_mode: list(string) = ['random_uniform']")
  .Attr("constant_value: list(float) = [0]")
  .Attr("min: list(float) = [-2]")
  .Attr("max: list(float) = [2]")
  .Attr("mu: list(float) = [0]")
  .Attr("sigma: list(float) = [1]")
  .Attr("seed: list(int) = [0]")
  .Attr("seed2: list(int) = [0]")
  .Attr("filter_mode: list(string) = ['no_filter']")
  .Attr("filter_freq: list(int) = [0]")
  .Attr("default_key_or_value: list(int) = [0]")
  .Attr("default_key: list(int) = [0]")
  .Attr("default_value: list(float) = [0]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("optimizer_mode: list(string) = [''] ")
  .Attr("optimizer_params: list(float) = [0.1]")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // keys must be a rank-1 tensor of ids.
    ShapeHandle keys_shape;
    TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &keys_shape));
    std::vector<int32_t> embedding_dims;
    // Reject a missing attr AND an explicitly empty list: indexing
    // embedding_dims[0] on an empty vector is undefined behavior.
    if (!c->GetAttr("embedding_dim", &embedding_dims).ok() || embedding_dims.empty()) {
        return errors::InvalidArgument("Invalid embedding_dim");
    }
    c->set_output(0, c->Matrix(c->Dim(keys_shape, 0), embedding_dims[0]));
    return Status::OK();
  });

// Lookup variant that receives pre-uniqued keys plus the indices mapping the
// original keys back to the uniqued set. Output shape follows unique_indices
// (input 3): [num_indices, embedding_dim[0]].
REGISTER_OP("FakeRemoteLookupUniqued")
  .Input("table_id: int32")
  .Input("keys: int64")
  .Input("actual_keys_input: int64")
  .Input("unique_indices: int32")
  .Input("key_count: int64")
  .Output("values: float32")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("value_total_len: list(int) = [0]")
  .Attr("initializer_mode: list(string) = ['random_uniform']")
  .Attr("constant_value: list(float) = [0]")
  .Attr("min: list(float) = [-2]")
  .Attr("max: list(float) = [2]")
  .Attr("mu: list(float) = [0]")
  .Attr("sigma: list(float) = [1]")
  .Attr("seed: list(int) = [0]")
  .Attr("seed2: list(int) = [0]")
  .Attr("filter_mode: list(string) = ['no_filter']")
  .Attr("filter_freq: list(int) = [0]")
  .Attr("default_key_or_value: list(int) = [0]")
  .Attr("default_key: list(int) = [0]")
  .Attr("default_value: list(float) = [0]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("optimizer_mode: list(string) = [''] ")
  .Attr("optimizer_params: list(float) = [0.1]")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Row count comes from unique_indices (input 3), which must be rank 1.
    ShapeHandle keys_shape;
    TF_RETURN_IF_ERROR(c->WithRank(c->input(3), 1, &keys_shape));
    std::vector<int32_t> embedding_dims;
    // Reject a missing attr AND an explicitly empty list: indexing
    // embedding_dims[0] on an empty vector is undefined behavior.
    if (!c->GetAttr("embedding_dim", &embedding_dims).ok() || embedding_dims.empty()) {
        return errors::InvalidArgument("Invalid embedding_dim");
    }
    c->set_output(0, c->Matrix(c->Dim(keys_shape, 0), embedding_dims[0]));
    return Status::OK();
  });

// Applies one Adam update step to the embedding rows addressed by keys.
REGISTER_OP("EmbeddingApplyAdam")
  .Input("var_handle: resource")
  .Input("beta1_power: T")
  .Input("beta2_power: T")
  .Input("lr: T")
  .Input("beta1: T")
  .Input("beta2: T")
  .Input("epsilon: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Applies one AdamW (decoupled weight decay) update step to the embedding
// rows addressed by keys.
REGISTER_OP("EmbeddingApplyAdamW")
  .Input("var_handle: resource")
  .Input("beta1_power: T")
  .Input("beta2_power: T")
  .Input("lr: T")
  .Input("weight_decay: T")
  .Input("beta1: T")
  .Input("beta2: T")
  .Input("epsilon: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("max_grad_norm: T")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("amsgrad: list(int) = [0]")
  .Attr("maximize: list(int) = [0]")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Applies one AdaGrad update step to the embedding rows addressed by keys.
REGISTER_OP("EmbeddingApplyAdaGrad")
  .Input("var_handle: resource")
  .Input("lr: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Applies one plain SGD update step to the embedding rows addressed by keys.
REGISTER_OP("EmbeddingApplySgd")
  .Input("var_handle: resource")
  .Input("lr: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Applies one RMSProp update step to the embedding rows addressed by keys.
// NOTE(review): T is `numbertype` here while the sibling apply ops restrict
// T to {float32, float16}; narrowing it now would break existing graphs, so
// the wider constraint is kept as-is.
REGISTER_OP("EmbeddingApplyRmsprop")
  .Input("var_handle: resource")
  .Input("lr: T")
  .Input("rho: T")
  .Input("momentum: T")
  .Input("epsilon: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: numbertype")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Applies one FTRL update step to the embedding rows addressed by keys.
REGISTER_OP("EmbeddingApplyFtrl")
  .Input("var_handle: resource")
  .Input("lr: T")
  .Input("lr_power: T")
  .Input("lambda1: T")
  .Input("lambda2: T")
  .Input("grad: T")
  .Input("keys: int64")
  .Input("global_step: Tstep")
  .Output("var_handle_output: resource")
  .Attr("embedding_dim: list(int) = [0]")
  .Attr("mask_zero: list(int) = [0]")
  .Attr("padding_key: list(int) = [0]")
  .Attr("padding_key_mask: list(int) = [1]")
  .Attr("completion_key: list(int) = [0]")
  .Attr("completion_key_mask: list(int) = [1]")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // The output handle simply passes through the input handle's shape.
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Computes an exponentially decayed learning rate tied to a variable handle.
// NOTE(review): the output shape is copied from the resource input (input 0),
// not from initial_learning_rate — presumably both are scalars; confirm
// against the kernel.
REGISTER_OP("ExponentialDecayLR")
  .Input("var_handle: resource")
  .Input("initial_learning_rate: T")
  .Input("decay_rate: T")
  .Input("decay_steps: Tstep")
  .Output("decayed_lr: T")
  .Attr("staircase: bool = false")
  .Attr("T: {float32, float16}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    c->set_output(0, c->input(0));
    return Status::OK();
  });

// Exports computed (optimizer-state) variables of the named tables to disk.
REGISTER_OP("EmbeddingComputeVarExport")
  .Input("file_path: string")
  .Input("ps_id: int32")
  .Input("table_id: int32")
  .Input("global_step: int64")
  .Attr("table_name: list(string)")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Imports computed (optimizer-state) variables of the named tables from disk.
// Mirrors EmbeddingComputeVarExport.
REGISTER_OP("EmbeddingComputeVarImport")
  .Input("file_path: string")
  .Input("ps_id: int32")
  .Input("table_id: int32")
  .Input("global_step: int64")
  .Attr("table_name: list(string)")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Exports embedding table contents to files on disk.
// embedding_dim / value_total_len / table_name / steps_to_live_list are
// parallel per-table lists (lengths presumably must match — verify in the
// kernel).
REGISTER_OP("EmbeddingTableExport")
  .Input("file_path: string")
  .Input("ps_id: int32")
  .Input("table_id: int32")
  .Input("global_step: Tstep")
  .Attr("embedding_dim: list(int)")
  .Attr("value_total_len: list(int)")
  .Attr("export_mode: {'all', 'old', 'new', 'specifiednew'} = 'all'")
  .Attr("only_var_flag: bool = false")   // true: export only variable data, not optimizer state
  .Attr("file_type: string = 'bin' ")
  .Attr("table_name: list(string)")
  .Attr("filter_export_flag: bool = false")
  .Attr("steps_to_live_list: list(int)")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Evicts stale entries from an embedding table based on age in steps.
// var_handle accepts either a resource handle or a raw int32 table id
// (hence T: {resource, int32}).
REGISTER_OP("EmbeddingTableEvict")
  .Input("var_handle: T")
  .Input("global_step: Tstep")
  .Attr("steps_to_live: int = 0")  // entries unused for this many steps are evicted — confirm semantics in kernel
  .Attr("T: {resource, int32}")
  .Attr("Tstep: {int32, int64}")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Maps raw int64 feature ids to compact int32 offset ids, element-wise.
REGISTER_OP("EmbeddingFeatureMapping")
  .Input("feature_id: int64")
  .Output("offset_id: int32")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Element-wise mapping: output shape equals input shape.
    const auto ids_shape = c->input(0);
    c->set_output(0, ids_shape);
    return Status::OK();
  });

// V2 of EmbeddingFeatureMapping: mapping is scoped to a named table with
// declared total/actual capacities.
REGISTER_OP("EmbeddingFeatureMappingV2")
  .Input("table_name: string")
  .Input("feature_id: int64")
  .Attr("table_total_size: list(int) = [1]")
  .Attr("table_actual_size: list(int) = [1]")
  .Output("offset_id: int32")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Element-wise mapping: output shape equals feature_id's shape (input 1).
    const auto ids_shape = c->input(1);
    c->set_output(0, ids_shape);
    return Status::OK();
  });

// Reports the number of features currently stored for a named mapping table.
REGISTER_OP("EmbeddingFeatureMappingTableSize")
  .Input("table_name: string")
  .Output("feature_size: int64")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Size is only known at runtime: a 1-D vector of unknown length.
    const auto size_shape = c->Vector(c->UnknownDim());
    c->set_output(0, size_shape);
    return Status::OK();
  });

// Dumps the (feature_id, offset_id) pairs of a mapping table across `num`
// output slots. Outputs are laid out as num feature_id vectors followed by
// num offset_id vectors, all of runtime-determined length.
REGISTER_OP("EmbeddingFeatureMappingFind")
  .Input("table_name: string")
  .Input("feature_size: int64")
  .Output("feature_id: num * int64")
  .Output("offset_id: num * int32")
  .Attr("num: int >= 1")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    int64 num = 0;
    // Propagate the attr-lookup status instead of silently ignoring it:
    // with num left at 0 the loop would set no outputs at all.
    TF_RETURN_IF_ERROR(c->GetAttr("num", &num));
    for (int64_t i = 0; i < num; ++i) {
      c->set_output(i, c->Vector(c->UnknownDim()));        // feature_id[i]
      c->set_output(i + num, c->Vector(c->UnknownDim()));  // offset_id[i]
    }
    return Status::OK();
  });

// Writes mapping tables (ids, offsets, embedding values) to files on disk.
// feature_id/offset_id are `num`-long input lists, parallel per table.
REGISTER_OP("EmbeddingFeatureMappingExport")
  .Input("file_path: string")
  .Input("table_name: string")
  .Input("global_step: int64")
  .Input("values: float")
  .Input("feature_id: num * int64")
  .Input("offset_id: num * int32")
  .Attr("embedding_dim: list(int)")
  .Attr("num: int >= 1")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Reads the number of features stored in an exported mapping file.
REGISTER_OP("EmbeddingFeatureMappingFileSize")
  .Input("file_path: string")
  .Input("table_name: string")
  .Input("global_step: int64")
  .Output("feature_size: int64")
  .Attr("embedding_dim: list(int)")
  // Default spelled lowercase: TensorFlow's op-def parser recognizes
  // `true`/`false`, not Python-style `True`.
  .Attr("only_offset_flag: bool = true")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Size is only known at runtime: a 1-D vector of unknown length.
    c->set_output(0, c->Vector(c->UnknownDim()));
    return Status::OK();
  });

// Restores mapping tables from exported files. Outputs are laid out as
// num feature_id vectors, then num offset_id vectors, then num values
// vectors, all of runtime-determined length.
REGISTER_OP("EmbeddingFeatureMappingImport")
  .Input("file_path: string")
  .Input("table_name: string")
  .Input("feature_size: int64")
  .Input("global_step: int64")
  .Output("feature_id: num * int64")
  .Output("offset_id: num * int32")
  .Output("values: num * float")
  .Attr("embedding_dim: list(int)")
  // Default spelled lowercase: TensorFlow's op-def parser recognizes
  // `true`/`false`, not Python-style `True`.
  .Attr("only_offset_flag: bool = true")
  .Attr("num: int >= 1")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    int64 num = 0;
    // Propagate the attr-lookup status instead of silently ignoring it:
    // with num left at 0 the loop would set no outputs at all.
    TF_RETURN_IF_ERROR(c->GetAttr("num", &num));
    for (int64_t i = 0; i < num; ++i) {
      c->set_output(i, c->Vector(c->UnknownDim()));            // feature_id[i]
      c->set_output(i + num, c->Vector(c->UnknownDim()));      // offset_id[i]
      c->set_output(i + 2 * num, c->Vector(c->UnknownDim()));  // values[i]
    }
    return Status::OK();
  });

// Inserts (feature_id, offset_id) pairs into a named mapping table.
REGISTER_OP("EmbeddingFeatureMappingInsert")
  .Input("table_name: string")
  .Input("feature_id: num * int64")
  .Input("offset_id: num * int32")
  .Attr("num: int >= 1")
  .SetShapeFn(shape_inference::NoOutputs);  // Side-effect only; no outputs.

// Host-side feature-id to offset-id mapping, element-wise over feature_id.
REGISTER_OP("HostFeatureMapping")
  .Input("feature_id: int64")
  .Output("offset_id: int64")
  .Attr("threshold: int = 1")
  .Attr("table_name: string = 'default_table_name' ")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Element-wise mapping: output shape equals input shape.
    const auto ids_shape = c->input(0);
    c->set_output(0, ids_shape);
    return Status::OK();
  });

// Exports the named host feature-mapping tables to `path`. The string
// output exists only to give the op a data dependency ("fake" output).
REGISTER_OP("FeatureMappingExport")
  .Input("path: string")
  .Attr("table_name_list: list(string)")
  .Output("export_fake_output: string")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Fake output mirrors the path input's shape.
    const auto path_shape = c->input(0);
    c->set_output(0, path_shape);
    return Status::OK();
  });

// Imports host feature-mapping tables from `path`. The string output exists
// only to give the op a data dependency ("fake" output).
REGISTER_OP("FeatureMappingImport")
  .Input("path: string")
  .Output("import_fake_output: string")
  .SetShapeFn([](shape_inference::InferenceContext *c) {
    // Fake output mirrors the path input's shape.
    const auto path_shape = c->input(0);
    c->set_output(0, path_shape);
    return Status::OK();
  });
}  // namespace tensorflow
