from dataclasses import dataclass
from typing import Iterable, Literal

from peft import AdaLoraConfig, LoraConfig, PeftMixedModel, PeftModel, TaskType, get_peft_model
from transformers import PreTrainedModel

from .finetune import FinetuneCallback, FinetuneHparams, apply_finetune
from .rewriting import TokenizedRewriting


@dataclass(kw_only=True)
class LoraHparams:
    """Hyper-parameters selecting and configuring a LoRA/AdaLoRA adapter.

    Consumed by ``apply_lora``, which maps these fields onto a PEFT
    ``LoraConfig`` or ``AdaLoraConfig``.
    """
    # Which PEFT adapter variant to build ("lora" or "adalora").
    type: Literal["lora", "adalora"]
    # Rank of the low-rank update matrices (PEFT's `r`).
    rank: int
    # Scaling factor for the adapter updates (PEFT's `lora_alpha`).
    alpha: int
    # Dropout probability applied within the adapter layers (`lora_dropout`).
    dropout: float
    # Names of the model sub-modules to wrap with adapters; materialized
    # into a list before being passed to PEFT as `target_modules`.
    modules_name: Iterable[str]


def apply_lora(*,
    model: PreTrainedModel,
    rewritings: Iterable[TokenizedRewriting],
    lora_hparams: LoraHparams,
    finetune_hparams: FinetuneHparams,
    finetune_callback: FinetuneCallback | None = None,
) -> PeftModel | PeftMixedModel:
    """Wrap ``model`` with (Ada)LoRA adapters and finetune it on ``rewritings``.

    Args:
        model: Base model to adapt; mutated in place (cache disabled,
            gradient checkpointing enabled) before wrapping.
        rewritings: Tokenized training examples forwarded to ``apply_finetune``.
        lora_hparams: Adapter type and configuration (rank, alpha, dropout,
            target modules).
        finetune_hparams: Optimization hyper-parameters for the finetuning loop.
        finetune_callback: Optional hook passed through to ``apply_finetune``.

    Returns:
        The PEFT-wrapped (and now finetuned) model.

    Raises:
        NotImplementedError: If ``lora_hparams.type`` is not a supported
            adapter type.
    """
    # Prepare the base model for memory-efficient training: disable the
    # decoding cache and turn on gradient checkpointing with input grads
    # (required so checkpointed activations receive gradients through the
    # frozen embedding layer).
    model.config.use_cache = False
    model.supports_gradient_checkpointing = True
    model.gradient_checkpointing_enable()
    model.enable_input_require_grads()

    if hasattr(model, 'peft_config'):
        # The model is already PEFT-wrapped; reuse it as-is.
        # NOTE(review): in this branch `lora_hparams` is silently ignored —
        # the existing adapter configuration wins.
        peft_model = model
    else:
        # Both config classes take the identical keyword arguments, so select
        # the class by adapter type instead of duplicating the construction.
        config_class_by_type = {"lora": LoraConfig, "adalora": AdaLoraConfig}
        try:
            config_class = config_class_by_type[lora_hparams.type]
        except KeyError:
            raise NotImplementedError(
                f"unsupported LoRA adapter type: {lora_hparams.type!r}") from None

        peft_config = config_class(
            task_type=TaskType.CAUSAL_LM,
            inference_mode=False,
            r=lora_hparams.rank,
            lora_alpha=lora_hparams.alpha,
            lora_dropout=lora_hparams.dropout,
            target_modules=list(lora_hparams.modules_name))
        peft_model = get_peft_model(model, peft_config)

    # Allow the wrapped model to be sharded across devices by the trainer.
    peft_model.is_parallelizable = True
    peft_model.model_parallel = True

    apply_finetune(
        model=peft_model,
        rewritings=rewritings,
        hparams=finetune_hparams,
        callback=finetune_callback)

    return peft_model
