#include "OptimScheduler.h"

//******************************************************************
//FUNCTION:
// Exponentially decay the learning rate from m_LrInit to m_LrFinal:
// interpolate linearly in log-space over [0, m_MaxSteps], then exponentiate.
// Progress past m_MaxSteps is clamped, so the rate never undershoots m_LrFinal.
// NOTE(review): assumes m_LrInit and m_LrFinal are > 0 (std::log) — enforced by the owner, presumably.
float COptimScheduler::getLearningRate(int vStep) {
    float progress = static_cast<float>(vStep) / static_cast<float>(m_MaxSteps);
    if (progress > 1.0f) progress = 1.0f;
    if (progress < 0.0f) progress = 0.0f;
    const float logLr = std::log(m_LrInit) * (1.0f - progress) + std::log(m_LrFinal) * progress;
    return std::exp(logLr);
}

//******************************************************************
//FUNCTION:
// Update the optimizer's learning rate according to the schedule at vStep.
// Fix: the original wrote the new rate only into param_groups()[0], so any
// additional parameter groups kept their stale learning rate; apply it to all.
// NOTE(review): the cast assumes every group holds AdamOptions — true for an
// Adam optimizer, but verify if m_pOpt can ever be a different optimizer type.
void COptimScheduler::step(int vStep) {
    const float lr = getLearningRate(vStep);
    for (auto& group : m_pOpt->param_groups()) {
        static_cast<torch::optim::AdamOptions&>(group.options()).set_lr(lr);
    }
}