################################################################################
#
# Copyright 2023 ByteDance Ltd. and/or its affiliates. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################


import inspect
from .distributed_optimizer import DistributedOptimizer


def initialize_optimizer_state(optimizer: DistributedOptimizer):
    """Eagerly materialize the inner optimizer's lazily-created state.

    Many torch optimizers (e.g. Adam) allocate per-parameter state tensors
    only on the first ``step()`` inside their private ``_init_group`` hook.
    This forces that allocation up front so the state exists before any
    checkpoint load / shard exchange touches it.

    Args:
        optimizer: the wrapping ``DistributedOptimizer``; its ``.optimizer``
            attribute must be a torch optimizer exposing ``_init_group``.

    NOTE(review): ``_init_group`` is a private torch API whose arity varies
    across optimizer classes and torch versions, hence the signature
    inspection below — confirm compatibility when bumping torch.
    """
    # Ensure .grad / main-grad buffers are populated before state init.
    optimizer._copy_model_grads_to_main_grads()

    orig_optimizer = optimizer.optimizer
    # The bound method is the same object for every group, so inspect its
    # signature once instead of once per param group.
    num_params = len(inspect.signature(orig_optimizer._init_group).parameters)
    for group in orig_optimizer.param_groups:
        # First positional arg is the param group; every remaining parameter
        # is an output list the optimizer appends into — pass fresh empty
        # lists and discard them, we only want the state-allocation side
        # effect.
        args = [group] + [[] for _ in range(num_params - 1)]
        orig_optimizer._init_group(*args)
