// Copyright (C) Kumo inc. and its affiliates.
// Author: Jeff.li lijippy@163.com
// All rights reserved.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//


#include <optional>

#include <pollux/testing/gtest_utils.h>
#include <pollux/functions/sparksql/tests/SparkFunctionBaseTest.h>

namespace kumo::pollux::functions::sparksql::test {
namespace {

class ElementAtTest : public SparkFunctionBaseTest {
 protected:
  // Evaluates `expression` over `parameters` (wrapped as a single row) and
  // returns the lone result value, or std::nullopt when that value is null.
  // Throws std::invalid_argument if evaluation produces more than one row.
  template <typename T = int64_t>
  std::optional<T> elementAtSimple(
      const std::string& expression,
      const std::vector<VectorPtr>& parameters) {
    auto resultVector =
        evaluate<SimpleVector<T>>(expression, make_row_vector(parameters));
    if (resultVector->size() != 1) {
      throw std::invalid_argument(
          "elementAtSimple expects a single output row.");
    }
    if (!resultVector->is_null_at(0)) {
      return resultVector->value_at(0);
    }
    return std::nullopt;
  }
};

} // namespace

// Spark's element_at ("a[1]") behavior:
// This behavior applies only when spark.sql.ansi.enabled = false.
// #1 - indices start at 1; an index of 0 throws an error.
// #2 - out-of-bounds array indices and missing map keys return null.
// #3 - negative indices are allowed (count backwards from the last element).
TEST_F(ElementAtTest, allFlavors2) {
  // One row containing the array [10, 11, 12].
  auto arrayVector = make_array_vector<int64_t>({{10, 11, 12}});

  // One row containing the map [10=>10, 11=>11, 12=>12]: three entries whose
  // keys and values are both (index + 10).
  auto entryCount = [](auto) { return 3; };
  auto entryKey = [](auto idx) { return idx + 10; };
  auto entryValue = [](auto idx) { return idx + 10; };
  auto mapVector =
      make_map_vector<int64_t, int64_t>(1, entryCount, entryKey, entryValue);

  // #1: 1-based indexing; index 0 is an error.
  EXPECT_EQ(elementAtSimple("element_at(C0, 1)", {arrayVector}), 10);
  EXPECT_EQ(elementAtSimple("element_at(C0, 2)", {arrayVector}), 11);
  EXPECT_EQ(elementAtSimple("element_at(C0, 3)", {arrayVector}), 12);
  POLLUX_ASSERT_THROW(
      elementAtSimple("element_at(C0, 0)", {arrayVector}),
      "SQL array indices start at 1");

  // #2: out-of-bounds array indices and absent map keys yield null.
  EXPECT_EQ(elementAtSimple("element_at(C0, 4)", {arrayVector}), std::nullopt);
  EXPECT_EQ(elementAtSimple("element_at(C0, 5)", {arrayVector}), std::nullopt);
  EXPECT_EQ(elementAtSimple("element_at(C0, 1001)", {mapVector}), std::nullopt);

  // #3: negative indices count from the end of the array.
  EXPECT_EQ(elementAtSimple("element_at(C0, -1)", {arrayVector}), 12);
  EXPECT_EQ(elementAtSimple("element_at(C0, -2)", {arrayVector}), 11);
  EXPECT_EQ(elementAtSimple("element_at(C0, -3)", {arrayVector}), 10);
  EXPECT_EQ(elementAtSimple("element_at(C0, -4)", {arrayVector}), std::nullopt);
}
} // namespace kumo::pollux::functions::sparksql::test
