from functools import cached_property, reduce
import logging
import os
from pathlib import Path
from pydantic import BaseModel, Field
import polars as pl
from pyliftover import LiftOver
from pysam import AlignmentFile, VariantFile  # pylint: disable=E0611

# Module-wide logging setup: root config with timestamped format, plus the
# "MTDD" named logger used by everything in this module.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logger = logging.getLogger("MTDD")


def convert_coordinate(liftover: "LiftOver", pos: int) -> int:
    """Lift a single chrM position through *liftover* and return the new position.

    Args:
        liftover: a pyliftover ``LiftOver`` instance built from a chain file.
        pos: position on chrM (pyliftover uses 0-based coordinates).

    Returns:
        The lifted position of the first (best) mapping.

    Raises:
        ValueError: if the position has no mapping in the chain file.
    """
    mapped = liftover.convert_coordinate("chrM", pos)
    # pyliftover returns None for an unknown chromosome and [] for an
    # unmapped position; both are treated as failure here.
    if not mapped:
        raise ValueError(f"cannot convert coordinate of chrM:{pos}")
    # Each mapping is a (chrom, pos, strand, ...) tuple; take the position
    # from the first match.
    return mapped[0][1]


class DELLY(BaseModel):
    """Mitochondrial DEL/DUP calling via the external ``delly`` tool.

    Runs DELLY on a chrM BAM, rewrites its BCF output into a simplified VCF
    (single SU:PE:SR sample column), and computes per-base depth with pysam.
    When ``shift_back_chain_file`` is set, output coordinates are lifted back
    through the chain file (e.g. to undo a shifted mitochondrial reference).
    """

    out_prefix: Path  # prefix from which all output file names are derived
    bam_file: Path  # input alignment; must contain a "chrM" contig
    reference_file: Path  # FASTA passed to `delly call -g`
    # Optional chain file used to lift coordinates back to the canonical chrM.
    shift_back_chain_file: Path | None = Field(alias="shift_back_chain_file", default=None)
    min_size: int  # minimum SV span (stop - start) to keep
    max_size: int  # maximum SV span to keep
    supported_reads: int  # minimum combined PE + SR evidence to keep a call

    class Config:
        arbitrary_types_allowed = True

    @cached_property
    def depth_file(self) -> Path:
        """Path of the per-base depth table (TSV)."""
        return self.out_prefix.with_suffix(f"{self.out_prefix.suffix}.depth.txt")

    @cached_property
    def bcf_file(self) -> Path:
        """Path of the raw DELLY BCF output."""
        return self.out_prefix.with_suffix(f"{self.out_prefix.suffix}.delly.bcf")

    @cached_property
    def vcf_file(self) -> Path:
        """Path of the simplified VCF derived from ``bcf_file``."""
        return self.bcf_file.with_suffix(".vcf")

    def do_delly(self):
        """Run DELLY (skipped if the BCF exists) and write the simplified VCF.

        Keeps only PASS records whose span lies in [min_size, max_size] and
        whose combined PE+SR support reaches ``supported_reads``. DELLY-specific
        INFO fields are stripped and the sample column is rewritten to a single
        SU:PE:SR triplet.

        Raises:
            RuntimeError: if the ``delly call`` command fails.
            ValueError: if a coordinate cannot be lifted back.
        """
        logger.info(f"DELLY - Running DELLY to {self.vcf_file}")
        if not self.bcf_file.exists():
            command = f"delly call -t DEL,DUP -g {self.reference_file} -o {self.bcf_file} {self.bam_file}"
            # os.system returns the raw exit/wait status: any nonzero value
            # (including negative, e.g. killed by a signal) means failure.
            if os.system(command) != 0:
                raise RuntimeError(f"Failed to run {command}")
        # Build the liftover once; constructing it per record (as before) would
        # re-parse the chain file for every variant.
        liftover = LiftOver(str(self.shift_back_chain_file)) if self.shift_back_chain_file else None
        with VariantFile(str(self.bcf_file)) as reader:
            header = reader.header.copy()
            # Remove DELLY-specific INFO definitions that we strip per record,
            # and replace all FORMAT fields with our own SU/PE/SR trio.
            header.info.remove_header("IMPRECISE")
            header.info.remove_header("PRECISE")
            header.info.remove_header("SVMETHOD")
            header.info.remove_header("PE")
            header.info.remove_header("SR")
            header.formats.clear_header()
            header.formats.add(id="SU", number=1, type="Integer", description="Number of pieces of evidence supporting the variant")
            header.formats.add(id="PE", number=1, type="Integer", description="Number of paired-end reads supporting the variant")
            header.formats.add(id="SR", number=1, type="Integer", description="Number of split reads supporting the variant")
            with open(self.vcf_file, "w") as writer:
                writer.write(str(header))
                for record in filter(lambda x: "PASS" in x.filter and self.min_size <= x.stop - x.start <= self.max_size, reader):
                    pe = record.info.pop("PE", 0)
                    sr = record.info.pop("SR", 0)
                    su = pe + sr
                    if su >= self.supported_reads:
                        # NOTE(review): "<DIP>" in the REF column is unusual —
                        # presumably a downstream project convention; confirm.
                        record.ref = "<DIP>"
                        record.info.pop("IMPRECISE", None)
                        record.info.pop("PRECISE", None)
                        record.info.pop("SVMETHOD", None)
                        if liftover:
                            lo_start = convert_coordinate(liftover, record.start)
                            lo_end = convert_coordinate(liftover, record.stop)
                            # Liftover may flip the interval; keep start <= stop.
                            record.start, record.stop = min(lo_start, lo_end), max(lo_start, lo_end)
                        # Rewrite the last two VCF columns (FORMAT and sample)
                        # with the simplified SU:PE:SR representation.
                        texts = str(record).split("\t")
                        texts[-2], texts[-1] = "SU:PE:SR", f"{su}:{pe}:{sr}"
                        writer.write("\t".join(texts) + "\n")

    def do_depth(self) -> None:
        """Write per-base chrM depth to ``depth_file`` (no-op if it exists).

        NOTE: the original signature claimed ``-> pl.DataFrame`` but the method
        never returned a value; the annotation now matches the behavior.
        """
        logger.info(f"BAM - Call depth to {self.depth_file}")
        if self.depth_file.exists():
            return
        with AlignmentFile(str(self.bam_file)) as reader:
            # count_coverage yields four per-base arrays (one per nucleotide);
            # summing them gives the total depth at each position.
            depth = reduce(lambda x, y: x + y, map(pl.Series, reader.count_coverage("chrM")))
            records = (
                pl.DataFrame({"depth": depth})
                .with_row_index(name="pos", offset=1)
                .with_columns(pl.lit("chrM").alias("chrom"))
            )
            if self.shift_back_chain_file:
                liftover = LiftOver(str(self.shift_back_chain_file))
                # pos is 1-based; pyliftover expects 0-based, hence the -1.
                # NOTE(review): the lifted value is written back without +1 —
                # confirm downstream expects this convention.
                records = records.with_columns(pl.col("pos").map_elements(lambda x: convert_coordinate(liftover, x - 1), pl.UInt32))
            records.write_csv(self.depth_file, separator="\t")

    @classmethod
    def merge_vcf_files(cls, in_vcf_files: list[Path], out_vcf_file: Path):
        """Concatenate VCF files into one, dropping duplicate variants.

        The output header is taken from the first input file; records are
        deduplicated on (chrom, start, stop, first ALT allele).
        """
        logger.info(f"VCF - Merge VCF files to {out_vcf_file}")
        variant_files = [VariantFile(str(vcf)) for vcf in in_vcf_files]
        try:
            with VariantFile(str(out_vcf_file), "w", header=variant_files[0].header.copy()) as writer:
                seen: set[str] = set()
                for reader in variant_files:
                    for record in reader:
                        key = f"{record.chrom}:{record.start}:{record.stop}:{record.alts[0]}"
                        if key not in seen:
                            seen.add(key)
                            writer.write(record)
        finally:
            # Close all readers even if writing fails part-way through
            # (the original leaked open handles on error).
            for reader in variant_files:
                reader.close()

    @classmethod
    def merge_depth_files(cls, in_depth_files: list[Path], out_depth_file: Path):
        """Merge per-base depth TSVs, keeping the maximum depth per position.

        Positions are matched on (chrom, pos) via a left join against the first
        file; rows absent from a later file keep their current depth.
        """
        logger.info(f"DEPTH - Merge Depth files to {out_depth_file}")
        records = pl.read_csv(in_depth_files[0], separator="\t")
        for depth_file in in_depth_files[1:]:
            records = (
                records.join(pl.read_csv(depth_file, separator="\t"), on=["chrom", "pos"], how="left")
                .with_columns(pl.max_horizontal("depth", "depth_right").alias("depth"))
                .select(["chrom", "pos", "depth"])
            )
        records.write_csv(out_depth_file, separator="\t")
