from functools import cached_property
import logging
from pathlib import Path
from typing_extensions import Self
from pydantic import model_validator
from pysam import VariantFile, VariantHeader, VariantRecord, tabix_compress, tabix_index  # pylint: disable=E0611
from .sequence import AcceptorSequence, DonorSequence
from .splicing import Splicing
from .transcript import Transcript
from .load_data import LoadData
from .pvs1 import PVS1
from .snv import SNV

# Configure root logging at ERROR so this module stays quiet by default.
# NOTE(review): the logger.info calls in OpenPVS1.run are suppressed at this
# level unless the embedding application reconfigures logging — confirm this
# is intended.
logging.basicConfig(level=logging.ERROR, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logger = logging.getLogger("OpenPVS1")


class OpenPVS1(LoadData):
    """Annotate VCF records with AutoPVS1 classifications.

    Merges the headers of one or more input VCFs, computes a PVS1 strength
    string for every variant/transcript pair, and writes a single annotated
    VCF. When ``output_file`` ends in ``.gz`` the result is bgzip-compressed
    and tabix-indexed.
    """

    input_files: list[Path]  # input VCF/VCF.gz files to annotate
    output_file: Path  # merged output VCF; a ".gz" suffix triggers bgzip + tabix
    genome_file: Path  # reference genome FASTA (consumed by LoadData)

    @cached_property
    def outdir(self) -> Path:
        """Return the output directory, creating it on first access."""
        outdir = self.output_file.parent
        outdir.mkdir(parents=True, exist_ok=True)
        return outdir

    @cached_property
    def sample(self) -> str:
        """Sample name derived from the first input file's basename.

        Bug fix: the original read ``self.input_file`` (singular), an
        attribute that does not exist on this model — only ``input_files``
        is declared — so accessing ``sample`` raised ``AttributeError``.
        """
        return self.input_files[0].name.split(".")[0]

    @model_validator(mode="after")
    def load_data(self) -> Self:
        """Wire shared reference data onto the collaborating classes.

        Runs once after pydantic validation. These assignments set
        class-level attributes, so the loaded data is process-global,
        not per-instance.
        """
        DonorSequence.Matrix = self.maxent_matrix5
        AcceptorSequence.Matrix = self.maxent_matrix3
        PVS1.GenomeFasta = Splicing.GenomeFasta = Transcript.GenomeFasta = self.genome_fasta
        PVS1.Domain = Splicing.Domain = self.domain_records
        PVS1.Hotspot = Splicing.Hotspot = self.hotspot_records
        PVS1.CuratedRegion = Splicing.CuratedRegion = self.curated_region_records
        PVS1.ExonLofPopmax = self.exon_lof_popmax_records
        PVS1.Pathogenic = self.pathogenic_records
        return self

    def calc_pvs1(self, record: VariantRecord) -> list[str]:
        """Return one PVS1 annotation string per allele group of *record*.

        For each SNV group produced by ``SNV.from_variant_record``, evaluate
        every SNV whose transcript is known and keep the strongest result;
        groups with no matching transcript get ``PVS1.default_str()``.
        """
        pvs1_texts: list[str] = []
        for snvs in SNV.from_variant_record(record):
            pvs1_list = [
                PVS1(snv=snv, transcript=transcript)
                for snv in snvs
                if (transcript := self.transcripts.get(f"{snv.chrom}:{snv.transcript}"))
            ]
            # max() is O(n) and, like the original stable descending sort,
            # returns the first candidate among ties with the highest raw strength.
            pvs1_texts.append(
                str(max(pvs1_list, key=lambda p: p.strength_raw.value)) if pvs1_list else PVS1.default_str()
            )
        return pvs1_texts

    def run(self) -> None:
        """Merge headers, annotate every record, and write the output VCF.

        When the configured output ends in ``.gz``, writes plain text to the
        suffix-stripped path first, then compresses/indexes and removes the
        intermediate file.
        """
        compress = self.output_file.suffix == ".gz"
        output_file = self.output_file.with_suffix("") if compress else self.output_file
        output_file.parent.mkdir(parents=True, exist_ok=True)
        header = VariantHeader()
        variant_readers: list[VariantFile] = []
        for input_file in self.input_files:
            reader = VariantFile(str(input_file), "r")
            header.merge(reader.header)
            variant_readers.append(reader)
        if "AutoPVS1" not in header.info:
            header.info.add(
                id="AutoPVS1",
                number=".",
                type="String",
                description="AutoPVS1,format=criterion|strength_raw|strength",
            )
        try:
            with open(output_file, "w") as writer:
                writer.write(str(header))
                for reader in variant_readers:
                    # Merge the combined header back so each record serializes
                    # with the AutoPVS1 INFO key defined.
                    reader.header.merge(header)
                    logger.info("Calculate PVS1 for %s and write output to %s", reader.filename.decode(), self.output_file)
                    for record in reader:
                        pvs1_texts = self.calc_pvs1(record)
                        if pvs1_texts:
                            record.info.update({"AutoPVS1": pvs1_texts})
                        writer.write(str(record))
        finally:
            # Fix: the original closed readers only on the happy path inside
            # the loop; an exception mid-write leaked every open VariantFile.
            for reader in variant_readers:
                reader.close()
        if compress:
            tabix_compress(str(output_file), str(self.output_file), force=True)
            tabix_index(str(self.output_file), preset="vcf", force=True)
            output_file.unlink(missing_ok=True)
