from functools import cached_property, reduce
import argparse
import os
from typing import Any, ClassVar
from concurrent import futures
from pathlib import Path
from pydantic import create_model, Field
from pysam import VariantFile, VariantHeader, tabix_compress, tabix_index  # pylint: disable=E0611
from pre_base import set_parser, PrepareBase, logger


def create_gnomad_model():
    """Build the pydantic model describing the gnomAD INFO fields we extract.

    Each model field is named with the output convention (``gnomAD_*``) while
    its alias is the corresponding key in the source gnomAD VCF INFO column
    (the ``*_joint*`` names). ``AF`` and filtering-allele-frequency fields are
    floats; all counts are ints. Every field defaults to 0 so records missing
    a population are still valid.
    """
    categories = ("AC", "AN", "AF", "nhomalt")
    populations = ("", "afr", "ami", "amr", "asj", "eas", "fin", "mid", "nfe", "sas")
    faf_populations = ("", "afr", "amr", "eas", "nfe", "sas")
    fields = {}
    for category in categories:
        value_type = float if category == "AF" else int
        for population in populations:
            suffix = f"_{population}" if population else ""
            out_name = f"{category}{suffix}"
            src_name = f"{category}_joint{suffix}"
            fields[f"gnomAD_{out_name}"] = (value_type, Field(alias=src_name, default=0))
            fields[f"gnomAD_{out_name}_XY"] = (value_type, Field(alias=f"{src_name}_XY", default=0))
    for population in faf_populations:
        suffix = f"_{population}" if population else ""
        fields[f"gnomAD_FAF{suffix}"] = (float, Field(alias=f"faf95_joint{suffix}", default=0))
    return create_model("BaseGnomad", **fields)


Gnomad = create_gnomad_model()


class PrepareGnomAD(PrepareBase):
    """Prepare per-chromosome gnomAD SNV annotation VCFs.

    For every chromosome, merges the matching ``*.<chrom>.vcf.bgz`` files
    found under ``input_dir``, rewrites the INFO column to the ``gnomAD_*``
    field names of the ``Gnomad`` model, then sorts and tabix-indexes the
    result with bcftools.
    """

    # Output INFO field name -> annotated python type, derived from the model.
    gnomad_schema: ClassVar[dict[str, Any]] = {n: f.annotation for n, f in Gnomad.model_fields.items()}
    # Directories holding the per-chromosome gnomAD BGZ VCF inputs.
    input_dir: list[Path]

    def header(self, chrom: str, size: int, vcf_readers: list[VariantFile]) -> VariantHeader:
        """Return a VCF header for *chrom* with the renamed gnomAD INFO fields.

        INFO type/description metadata is copied from the first source reader
        that declares each aliased key; later duplicates are skipped.
        """
        # source (aliased) INFO key -> output field name
        info_keys = {field_info.alias: name for name, field_info in Gnomad.model_fields.items()}
        header = VariantHeader()
        header.contigs.add(id=chrom, length=size)
        for reader in vcf_readers:
            for source_key, item in reader.header.info.items():
                target_key = info_keys.get(source_key)
                if target_key and not header.info.get(target_key):
                    # every model field carries a single value, hence number=1
                    header.info.add(id=target_key, number=1, type=item.type, description=item.description)
        return header

    @staticmethod
    def _run_command(command: str) -> None:
        """Run a shell command, raising RuntimeError on a nonzero exit status."""
        if os.system(command) != 0:
            raise RuntimeError(f"run command failed: {command}")

    def process_chrom(self, chrom: str, size: int, intervals: list[tuple[int, int]]) -> None:
        """Merge, rename, sort and index the gnomAD records for one chromosome.

        Skips all work when the compressed output already exists. Within an
        interval, the first reader yielding a chrom:pos:ref:alt key wins and
        later duplicates are dropped.
        """
        vcf_files = [path for directory in self.input_dir for path in directory.glob(f"*.{chrom}.vcf.bgz")]
        vcf_readers = [VariantFile(str(path)) for path in vcf_files]
        output_vcf = self.outdir / f"{chrom}.vcf"
        output_gz = output_vcf.with_suffix(".vcf.gz")
        try:
            if not output_gz.exists():
                with open(output_vcf, "w") as writer:
                    writer.write(str(self.header(chrom, size, vcf_readers)))
                    for start, end in intervals:
                        seen: set[str] = set()  # variant keys already written in this interval
                        for reader in vcf_readers:
                            for row in reader.fetch(chrom, start, end):
                                key = f"{row.chrom}:{row.pos}:{row.ref}:{row.alts[0]}"
                                if key in seen:
                                    continue
                                seen.add(key)
                                # pysam may return per-allele INFO values as tuples; keep the first allele
                                raw_info = {k: (v[0] if isinstance(v, (list, tuple)) else v) for k, v in row.info.items()}
                                gnomad = Gnomad.model_validate(raw_info)
                                info = ";".join(f"{name}={value}" for name, value in gnomad.model_dump().items())
                                writer.write(f"{row.chrom}\t{row.pos}\t.\t{row.ref}\t{row.alts[0]}\t.\t.\t{info}\n")
                            logger.info("Processed %s:%s:%s %s", chrom, f"{start:,}", f"{end:,}", Path(reader.filename.decode()).name)
                temp_dir = output_gz.parent / f"temp_{chrom}"
                temp_dir.mkdir(parents=True, exist_ok=True)
                # bcftools sort emits the final bgzipped file; index makes it tabix-queryable
                self._run_command(f"bcftools sort {output_vcf} -T {temp_dir} -o {output_gz} -Oz")
                self._run_command(f"bcftools index --tbi {output_gz}")
                output_vcf.unlink(missing_ok=True)
        finally:
            # close readers even when writing/sorting/indexing fails
            for reader in vcf_readers:
                reader.close()
        logger.info("processed %s", output_gz)

    def run(self) -> None:
        """Process every chromosome from the reference, up to 8 in parallel."""
        logger.info("reference: %s", self.reference_file)
        self.outdir.mkdir(parents=True, exist_ok=True)
        with futures.ProcessPoolExecutor(max_workers=8) as executor:
            tasks = [
                executor.submit(self.process_chrom, chrom, size, intervals)
                for chrom, size, intervals in self.iter_reference()
            ]
            for task in futures.as_completed(tasks):
                task.result()  # re-raise any worker exception


if __name__ == "__main__":
    # Command-line entry point: parse args, then dispatch via the func default.
    parser = argparse.ArgumentParser(description="Prepare gnomAD SNV")
    parser.add_argument(
        "--input_dir",
        "-i",
        nargs="+",
        type=Path,
        required=True,
        help="input, gnomAD Exome BGZ directory",  # typo fix: "direcotry" -> "directory"
    )
    set_parser(parser=parser)
    parser.set_defaults(func=lambda args: PrepareGnomAD.model_validate(vars(args)).run())
    args = parser.parse_args()
    args.func(args)
