import pysam
import argparse
import sys
import os
from multiprocessing import Manager
import multiprocessing as mp
import gzip
import mmap
import pod5 as p5
import time
import datetime
import numpy as np
from ont_fast5_api.fast5_interface import get_fast5_file
from pathlib import Path
# from memory_profiler import profile
from deepsignal3.utils.process_utils import CIGAR2CODE
from deepsignal3.utils.process_utils import CIGAR_REGEX
from deepsignal3.utils.process_utils import fill_files_queue
from deepsignal3.utils import bam_reader
from deepsignal3.extract_features_pod5 import _group_signals_by_movetable_v2
import re
import traceback

import threading
# Per-process skip counters reported at the end of extraction.
short_number = 0  # reads whose basecalled sequence was shorter than the cutoff
mv_unmapped = 0   # reads whose move table did not map 1:1 onto the sequence
# NOTE(review): this lock only guards threads inside one process; the workers
# run as separate multiprocessing.Process instances, so each worker mutates
# its own copy of these counters — confirm whether aggregation was intended.
lock = threading.Lock()


def _write_featurestr(write_fp, featurestr_q, time_wait=1, control=False):
    """Drain batches of feature strings from *featurestr_q* into *write_fp*.

    Runs until the sentinel string ``"kill"`` is received from the queue.
    Every other queue item is an iterable of feature strings; each string is
    written as one line, and the file is flushed after each batch.

    Args:
        write_fp: path of the output text file (truncated/overwritten).
        featurestr_q: queue yielding lists of feature strings, or "kill".
        time_wait: seconds to sleep between polls while the queue is empty.
        control: when True, terminate the process (sys.exit) once 4000 lines
            have been written — used for bounded test/control runs.
    """
    output = 0
    with open(write_fp, 'w') as wf:
        while True:
            # Poll-and-sleep instead of a blocking get so a Manager-backed
            # queue stays responsive in this child process.
            if featurestr_q.empty():
                time.sleep(time_wait)
                continue
            features_str = featurestr_q.get()
            if features_str == "kill":
                break
            for one_features_str in features_str:
                output += 1
                wf.write(one_features_str + "\n")
                # Idiomatic truthiness check (was `control is True`).
                if control and output >= 4000:
                    # A control run only needs a bounded sample; stop early.
                    sys.exit(0)
            wf.flush()

##########
# process tsv and alignment to bam and pod5
##########


def process_dorado(bam_index, pod5_dr, read_ids_Q, output_Q, time_wait=1, d_batch_size=2, qsize_limit=22):
    """Worker loop: join pod5 raw signals with BAM alignments into feature strings.

    Consumes batches of read ids from ``read_ids_Q`` (terminated by a "kill"
    sentinel, which is re-queued so sibling workers also stop), looks up each
    read's signal in ``pod5_dr`` and its alignments in ``bam_index``, groups
    the signal per base via the basecaller move table, and pushes
    tab-separated feature strings onto ``output_Q`` in batches of
    ``d_batch_size``. ``qsize_limit`` throttles input while the output queue
    is backed up.
    """
    fea_list = []
    # NOTE(review): these globals (and `lock`) are per-process state; updates
    # made here are not visible to the parent process that prints them.
    global short_number
    global mv_unmapped
    while True:
        # Poll for work; blocks here until a batch (or the sentinel) arrives.
        while read_ids_Q.empty():
            time.sleep(time_wait)
        read_data = read_ids_Q.get()
        if read_data == "kill":
            # Re-queue the sentinel so the other worker processes also exit.
            read_ids_Q.put("kill")
            break
        for read_name in read_data:
            # Back-pressure: pause until the writer drains the output queue.
            while output_Q.qsize() >= qsize_limit:
                time.sleep(time_wait)
            read = pod5_dr.get_read(read_name)
            if read is None:
                continue
            signal = read.signal
            shift_dacs_to_pa = read.calibration.offset
            scale_dacs_to_pa = read.calibration.scale
            try:
                for bam_read in bam_index.get_alignments(read_name):

                    reference_name = bam_read.reference_name
                    seq = bam_read.get_forward_sequence()
                    if seq is None or signal is None:
                        continue
                    if reference_name is None:
                        continue
                    if len(seq) < 10000:
                        # Skip short reads; count them for the final summary.
                        lock.acquire()
                        short_number += 1
                        lock.release()
                        continue
                    read_dict = dict(bam_read.tags)

                    strand_code = 0 if bam_read.is_reverse else 1

                    # NOTE(review): `strand` is computed but never used below.
                    strand = "-" if strand_code == 0 else "+"
                    ref_start = bam_read.reference_start

                    # Basecaller tags — presumably ts = trimmed samples,
                    # sm/sd = pA->norm shift/scale, mv = move table
                    # (stride followed by per-sample moves); TODO confirm
                    # against the basecaller's tag documentation.
                    num_trimmed = read_dict["ts"]
                    shift_pa_to_norm = read_dict["sm"]
                    scale_pa_to_norm = read_dict["sd"]
                    mv_table = read_dict["mv"]
                    if num_trimmed >= 0:
                        # Drop trimmed samples from the front of the signal.
                        signal_trimmed = signal[num_trimmed:]
                    else:
                        # Negative ts: drop samples from the end instead.
                        signal_trimmed = signal[:num_trimmed]
                    # Group raw samples per base using the move table
                    # (mv_table[0] is the stride, the rest are the moves).
                    signal_group = _group_signals_by_movetable_v2(
                        signal_trimmed, np.asarray(mv_table[1:]), int(mv_table[0]))
                    if len(signal_group) != len(seq):
                        # Move table does not map 1:1 onto the sequence; skip.
                        lock.acquire()
                        mv_unmapped += 1
                        lock.release()
                        continue
                    signal_group_new = []
                    for sig in signal_group:
                        signal_group_new.append(
                            np.round(np.array(sig), decimals=6))

                    # Per-base groups joined by ';', samples within by ','.
                    norm_signals_text = ';'.join(
                        [",".join([str(y) for y in x]) for x in signal_group_new])
                    # Placeholder columns kept for downstream format parity.
                    pred_deepsignal_text = '.'

                    pred_dorado_text = '.'
                    mean_pred_text = '.'
                    pred_label_text = '.'
                    pred_pos = '.'
                    sample_id = '\t'.join(
                        [read_name, str(reference_name), str(ref_start)])
                    bisulfite_text = '.'
                    fea_str = '\t'.join([sample_id, seq, norm_signals_text, pred_pos, pred_dorado_text, pred_deepsignal_text, mean_pred_text, pred_label_text, str(bam_read.mapping_quality),
                                         str(shift_dacs_to_pa), str(scale_dacs_to_pa), str(shift_pa_to_norm), str(scale_pa_to_norm), bisulfite_text])
                    fea_list.append(fea_str)
                    if len(fea_list) >= d_batch_size:
                        output_Q.put(fea_list)
                        fea_list = []
            except Exception as e:
                # Best-effort: log the traceback and keep processing reads.
                traceback.print_exc()
    # Flush any partial batch left over after the sentinel was seen.
    if len(fea_list) > 0:
        output_Q.put(fea_list)


def read_tsv(key_input):
    """Return one whitespace-stripped key per line of the file *key_input*."""
    with open(key_input, 'r') as fh:
        return [line.strip() for line in fh]


def extract(args):
    """Extract per-read feature strings from a BAM/pod5 pair to a text file.

    Builds a read-name index over ``args.bam`` and a pod5 dataset reader over
    ``args.signal``, then spawns ``args.nproc - 1`` reader processes running
    ``process_dorado`` and one writer process running ``_write_featurestr``.
    Read ids (optionally restricted by ``args.tsv``) are fed through a queue
    terminated by a "kill" sentinel.
    """
    manager = mp.Manager()
    output_Q = manager.Queue()

    bam_index = bam_reader.ReadIndexedBam(args.bam)
    signal_dir = os.path.abspath(args.signal)
    pod5_dr = p5.DatasetReader(signal_dir, recursive=True, index=True)
    if args.tsv is not None:
        ids = read_tsv(args.tsv)
    else:
        ids = list(pod5_dr.read_ids)
    read_ids_Q = manager.Queue()
    fill_files_queue(read_ids_Q, ids, 20, True)
    read_ids_Q.put("kill")

    timewait = args.timewait
    ex_dp = args.nproc - 1
    ex_procs = []
    for _ in range(ex_dp):
        # BUGFIX: args.d_batch_size was parsed but never forwarded, so the
        # --d_batch_size flag was silently ignored; pass it through here.
        pb = mp.Process(target=process_dorado,
                        args=(bam_index, pod5_dr, read_ids_Q, output_Q,
                              timewait, args.d_batch_size),
                        name="pb_reader")
        pb.daemon = True
        pb.start()
        ex_procs.append(pb)
    p_w = mp.Process(target=_write_featurestr, args=(args.write_path, output_Q, timewait, args.control),
                     name="writer")
    p_w.daemon = True
    p_w.start()
    # Wait for all readers, then tell the writer no more batches are coming.
    for pb in ex_procs:
        pb.join()
    output_Q.put("kill")
    p_w.join()
    # NOTE(review): the counters are updated inside the worker *processes*,
    # whose module globals are not shared with this parent process, so both
    # prints report 0 — consider multiprocessing.Value for real aggregation.
    global short_number
    global mv_unmapped
    print('short_number: {}'.format(short_number))
    print('mv_unmapped: {}'.format(mv_unmapped))


def parse_args():
    """Define and parse the command-line interface for feature extraction."""
    p = argparse.ArgumentParser("")
    p.add_argument("--tsv", type=str, required=False,
                   help='read ids of the reads want to extract')
    p.add_argument("--bam", type=str, required=True)
    p.add_argument("--pod5", action="store_true", default=False,
                   required=False, help='use pod5, default false')
    p.add_argument("--bed", type=str, required=False)
    p.add_argument("--signal", type=str, required=True)
    p.add_argument("--write_path", type=str, required=True)
    p.add_argument("--nproc", "-p", type=int, required=True)
    p.add_argument("--timewait", "-t", default=0.1, type=float, required=False)
    p.add_argument("--d_batch_size", action="store", type=int, default=2,
                   required=False)
    p.add_argument("--control", action="store_true", default=False,
                   required=False, help='test')
    return p.parse_args()


def main():
    """Script entry point: parse CLI arguments, then run the extraction."""
    extract(parse_args())


if __name__ == '__main__':
    # main() returns None implicitly, so this exits with status 0 on success.
    sys.exit(main())
