/**
 * Copyright 2023-2024, XGBoost contributors
 *
 * \brief Common error messages for various checks.
 */
#ifndef XGBOOST_COMMON_ERROR_MSG_H_
#define XGBOOST_COMMON_ERROR_MSG_H_

#include <cstdint>    // for uint64_t
#include <limits>     // for numeric_limits
#include <string>     // for string

#include "xgboost/base.h"     // for bst_feature_t
#include "xgboost/context.h"  // for Context
#include "xgboost/logging.h"
#include "xgboost/string_view.h"  // for StringView

namespace xgboost::error {
// Message for a weight vector whose size doesn't match the number of query groups.
constexpr StringView GroupWeight() {
  return
      "Size of weight must equal to the number of query groups "
      "when ranking group is used.";
}

// Prefix for an invalid query-group error; the caller appends the expected row count.
constexpr StringView GroupSize() {
  return
      "Invalid query group structure. "
      "The number of rows obtained from group doesn't equal to ";
}

// Message for a label/prediction size mismatch.
constexpr StringView LabelScoreSize() {
  return
      "The size of label doesn't match "
      "the size of prediction.";
}

// Message for non-finite (or too-large) values found in the input data.
constexpr StringView InfInData() {
  return
      "Input data contains `inf` or a value too large, "
      "while `missing` is not set to `inf`";
}

// Message for platforms that lack a 128-bit floating point type.
constexpr StringView NoF128() {
  return
      "128-bit floating point is not supported "
      "on current platform.";
}

// Message for `max_bin` values that disagree across QuantileDMatrix objects or
// with the Booster being trained.
constexpr StringView InconsistentMaxBin() {
  return
      "Inconsistent `max_bin`. `max_bin` should be the same across different "
      "QuantileDMatrix, and consistent with the Booster being trained.";
}

// Validation message for the `max_bin` parameter (must be at least 2).
constexpr StringView InvalidMaxBin() {
  return "`max_bin` must be equal to or greater than 2.";
}

// Message for an unrecognized device type.
constexpr StringView UnknownDevice() {
  return "Unknown device type.";
}

/**
 * \brief Check that the number of features is representable by `bst_feature_t`.
 *
 * \param n_features Number of features in the input data.
 */
inline void MaxFeatureSize(std::uint64_t n_features) {
  constexpr auto kMaxNFeatures = std::numeric_limits<bst_feature_t>::max();
  // CHECK_LE is inclusive: exactly kMaxNFeatures features is still supported,
  // so the message says "more than" rather than "or greater".
  CHECK_LE(n_features, kMaxNFeatures)
      << "Unfortunately, XGBoost does not support data matrices with more than "
      << kMaxNFeatures << " features.";
}

// Message for inplace prediction invoked with a non-proxy DMatrix.
constexpr StringView InplacePredictProxy() {
  return
      "Inplace predict accepts only "
      "DMatrixProxy as input.";
}

/**
 * \brief Raise a fatal error when the sample size exceeds what the updater supports.
 *
 * \param n Maximum number of samples the current updater can handle.
 */
inline void MaxSampleSize(std::size_t n) {
  // Note the space after the colon so the limit isn't glued to the label.
  LOG(FATAL) << "Sample size too large for the current updater. Maximum number of samples: " << n
             << ". Consider using a different updater or tree_method.";
}

// Long-form guidance for loading artifacts serialized by an older XGBoost
// version; emitted (once) by WarnOldSerialization below.
constexpr StringView OldSerialization() {
  return R"doc(If you are loading a serialized model (like pickle in Python, RDS in R) or
configuration generated by an older version of XGBoost, please export the model by calling
`Booster.save_model` from that version first, then load it back in current version. See:

    https://xgboost.readthedocs.io/en/stable/tutorials/saving_model.html

for more details about differences between saving model and serializing.
)doc";
}

// Emit the old-serialization warning at most once per thread; repeating it
// would be overly verbose in distributed environments.
inline void WarnOldSerialization() {
  static thread_local bool warned{false};
  if (!warned) {
    LOG(WARNING) << OldSerialization();
    warned = true;
  }
}

// Warn that the `gpu_hist` tree method is deprecated (exact message in the .cc).
void WarnDeprecatedGPUHist();

// Warn about manually specifying the tree updater (exact message in the .cc).
void WarnManualUpdater();

// Warn that the `gpu_id` parameter is deprecated (exact message in the .cc).
void WarnDeprecatedGPUId();

// Warn that the provided dataset is empty (exact message in the .cc).
void WarnEmptyDataset();

// Build a deprecation message for `old`, deprecated since version `since`,
// pointing users to `replacement`.
[[nodiscard]] std::string DeprecatedFunc(StringView old, StringView since, StringView replacement);

// Message for a `device` that is not a usable CUDA device.
constexpr StringView InvalidCUDAOrdinal() {
  return
      "Invalid device. `device` is required to be CUDA and there must be "
      "at least one GPU available for using GPU.";
}

// Report a device mismatch between the booster's context and the data's context.
void MismatchedDevices(Context const* booster, Context const* data);

// Message for federated-learning entry points in builds without federated support.
inline auto NoFederated() {
  constexpr char const* kMsg = "XGBoost is not compiled with federated learning support.";
  return kMsg;
}

/**
 * \brief Build an error message for a component that can't handle categorical features.
 *
 * \param name Name of the component (e.g. an updater or objective).
 * \return `name` followed by " doesn't support categorical features."
 */
inline auto NoCategorical(std::string name) {
  // Append in place: `name` is already our own copy, so reuse it instead of
  // allocating another temporary via operator+.
  name += " doesn't support categorical features.";
  return name;
}

// Fail fatally when page concatenation (`extmem_single_page`) is requested in
// an unsupported configuration.
inline void NoPageConcat(bool concat_pages) {
  if (!concat_pages) {
    return;
  }
  LOG(FATAL) << "`extmem_single_page` must be false when there's no sampling or when it's "
                "running on the CPU.";
}

// Message for feature types that differ between data batches.
constexpr StringView InconsistentFeatureTypes() {
  return
      "Inconsistent feature types "
      "between batches.";
}

// Check the linked NCCL version; presumably rejects versions that are too old
// (see the implementation for the exact threshold).
void CheckOldNccl(std::int32_t major, std::int32_t minor, std::int32_t patch);
}  // namespace xgboost::error
#endif  // XGBOOST_COMMON_ERROR_MSG_H_
