import torch
from transformers.modeling_outputs import BaseModelOutputWithPastAndCrossAttentions,BaseModelOutputWithPoolingAndCrossAttentions


# Define a forward_pre_hook for a module with parameters.
def auto_device_adjustment_hook(module, inputs, kwargs):
    """forward_pre_hook (registered with ``with_kwargs=True``) that moves the
    call arguments onto the device of the module's parameters.

    Records the arguments' original device on ``module._original_device`` so
    ``forward_hook`` can move the outputs back afterwards.

    Returns the (possibly moved) ``(inputs, kwargs)`` pair, as required by
    PyTorch for pre-hooks registered with ``with_kwargs=True``.
    """
    # addHook only registers this hook on modules that have parameters,
    # so next() is safe here.
    weight_device = next(module.parameters()).device
    # Determine the source device from the first tensor found among the
    # positional args, falling back to the keyword args (transformers
    # frequently calls submodules with keyword-only tensor arguments,
    # in which case ``inputs`` is empty).
    input_device = None
    for item in inputs:
        if isinstance(item, torch.Tensor):
            input_device = item.device
            break
    if input_device is None:
        for value in kwargs.values():
            if isinstance(value, torch.Tensor):
                input_device = value.device
                break
    if input_device is None:
        # No tensors at all — nothing to move, nothing to record.
        return inputs, kwargs
    # If the arguments and the weights are not on the same device,
    # move both positional and keyword arguments to the weight's device.
    if input_device != weight_device:
        kwargs = recursive_to_device(kwargs, weight_device)
        inputs = recursive_to_device(inputs, weight_device)
    # Remember where the arguments came from for forward_hook.
    module._original_device = input_device
    return inputs, kwargs

# Define a forward_pre_hook for a module without parameters.
# NOTE: the name keeps the historical "foward" typo because addHook
# references it; renaming would break that call site.
def dropout_foward_pre_hook(module, inputs):
    """forward_pre_hook for parameterless modules (e.g. Dropout).

    Only records the first tensor argument's device on
    ``module._original_device`` so ``forward_hook`` can restore outputs.
    Guards against empty or non-tensor ``inputs`` (the original crashed
    with IndexError when the module was called with keyword-only args).
    """
    if inputs and isinstance(inputs[0], torch.Tensor):
        module._original_device = inputs[0].device

# Send data to the device.
def recursive_to_device(data, device):
    """Recursively move every tensor contained in *data* to *device*.

    Tuples, lists and dicts are rebuilt with their values moved. Dict
    subclasses — e.g. the transformers ModelOutput variants returned by
    GPT-2 (BaseModelOutputWithPastAndCrossAttentions) and BERT
    (BaseModelOutputWithPoolingAndCrossAttentions) — are reconstructed as
    the same subclass so callers keep the richer return type. This
    generalizes the previous hard-coded handling of exactly those two
    classes to every ModelOutput type. Non-tensor leaves are returned
    unchanged.
    """
    if isinstance(data, torch.Tensor):
        return data.to(device)
    if isinstance(data, tuple):
        return tuple(recursive_to_device(item, device) for item in data)
    if isinstance(data, list):
        return [recursive_to_device(item, device) for item in data]
    if isinstance(data, dict):
        moved = {key: recursive_to_device(value, device) for key, value in data.items()}
        if type(data) is dict:
            return moved
        # Rebuild the subclass from a single mapping — the same constructor
        # call the previous hard-coded ModelOutput branches used. Fall back
        # to a plain dict for subclasses whose constructor rejects it.
        try:
            return type(data)(moved)
        except TypeError:
            return moved
    return data

# Send the output of the module back to the original device.
def forward_hook(module, inputs, outputs):
    """forward_hook that moves *outputs* back to the device the module's
    arguments originally lived on (recorded by the pre-hooks as
    ``module._original_device``). If no pre-hook recorded a device, the
    outputs are returned untouched.
    """
    if not hasattr(module, '_original_device'):
        return outputs
    # Recursively walk the output structure and move every tensor back.
    return recursive_to_device(outputs, module._original_device)

# Add hooks for each submodule of the model.
def addHook(model):
    """Register the device-adjustment hooks on *model* and every submodule.

    Modules owning parameters get ``auto_device_adjustment_hook`` (which
    needs kwargs, hence ``with_kwargs=True``); parameterless modules get
    the lighter ``dropout_foward_pre_hook``. Every module gets
    ``forward_hook`` to move outputs back to their original device.

    Returns *model* for call-chaining convenience.
    """
    for _name, module in model.named_modules():
        module.register_forward_hook(forward_hook)
        # next() probes for a first parameter without materializing the
        # whole parameter list; every nn.Module has .parameters, so the
        # old hasattr check was redundant.
        if next(module.parameters(), None) is not None:
            module.register_forward_pre_hook(auto_device_adjustment_hook, with_kwargs=True)
        else:
            module.register_forward_pre_hook(dropout_foward_pre_hook)
    return model
