import gzip
import json
import logging
import os
import re
import shutil
from multiprocessing import Pool

# Module-level logger shared by every helper in this file.
logger = logging.getLogger('NextApp-ONT-QC')


class Fastq(object):
    """Accumulator for a single FASTQ record fed to it line by line.

    Callers push the header via :meth:`read_seqid` and the remaining
    lines via :meth:`read_seq_qual`; ``error`` reports whether the
    accumulated record is complete and consistent, ``out`` renders it
    as encoded FASTQ bytes. Quality characters outside the accepted
    range chr(34)..chr(72) ('"'..'H') are replaced by '#' and the
    record is flagged via ``is_qual_fixed``.
    """

    # Header: "@<name> runid=<description>" (ONT-style read id).
    _SEQID_RE = re.compile(r'^@([\-\w]+)\s+(runid=[\-:#$%&=\s\w]+)$')
    # Nucleotide sequence: DNA/RNA letters plus N, either case.
    _SEQ_RE = re.compile(r'^[TACGUNtacgun]+$')
    # Candidate quality string: any run of chars chr(1)..chr(99).
    _QUAL_RE = re.compile(r'^[%s]+$' % ''.join(chr(i) for i in range(1, 100)))
    # Quality characters accepted as-is; anything else becomes '#'.
    _QUAL_OK = frozenset(chr(i) for i in range(34, 73))

    def __init__(self):
        """Start with an empty record."""
        self.title = None        # "<name> <description>" from the header line
        self.quality = None      # quality string (possibly repaired)
        self.sequence = None     # nucleotide string
        self.is_qual_fixed = False  # True if any quality char was replaced

    def read_seqid(self, line):
        """Parse a header line; on match, store the read title."""
        match = self._SEQID_RE.match(line)
        if match:
            name, description = match.group(1, 2)
            self.title = '%s %s' % (name, description)

    def read_seq_qual(self, line):
        """Classify *line* as sequence or quality and store it.

        A pure-nucleotide line is taken as the sequence only while no
        sequence has been captured yet — a quality string consisting
        solely of A/C/G/T/U/N characters (all within chr 34..72) would
        otherwise overwrite the sequence (BUGFIX). Other lines matching
        the quality charset become the (repaired) quality string;
        anything else resets the record.
        """
        if self.sequence is None and self._SEQ_RE.match(line):
            self.sequence = line
        elif self._QUAL_RE.match(line):
            repaired = []
            for qual_chr in line:
                if qual_chr not in self._QUAL_OK:
                    qual_chr = '#'
                    self.is_qual_fixed = True
                repaired.append(qual_chr)
            self.quality = ''.join(repaired)  # join, not +=: linear build
        else:
            self.clear()

    @property
    def error(self):
        """0 = ok, 1 = incomplete record, 2 = seq/qual length mismatch."""
        if self.title is None or self.sequence is None or self.quality is None:
            return 1
        if len(self.sequence) != len(self.quality):
            return 2
        return 0

    @property
    def out(self):
        """The record rendered as 4-line FASTQ, encoded to bytes."""
        return ('@%s\n' % '\n'.join((self.title, self.sequence, '+', self.quality))).encode()

    def clear(self):
        """Reset all fields to the empty-record state."""
        self.title, self.quality, self.sequence, self.is_qual_fixed = None, None, None, False


def __filter_fastq_single(fastq_file, outdir):
    """Clean one FASTQ file into a gzipped copy plus a JSON stat file.

    Parses *fastq_file* record by record, writes valid records to
    ``<outdir>/tmp.<basename>.gz`` and a JSON summary
    ``{'raw', 'clean', 'error', 'fixed'}`` to
    ``<outdir>/tmp.<basename>.json``. If both outputs already exist the
    file is skipped (resume support). Malformed records are logged and
    dropped; worker-side counters are persisted for later aggregation.
    """
    base = os.path.basename(fastq_file)
    tmp_fastq = os.path.join(outdir, 'tmp.%s.gz' % base)
    tmp_stat_json = os.path.join(outdir, 'tmp.%s.json' % base)
    if os.path.exists(tmp_stat_json) and os.path.exists(tmp_fastq):
        # BUGFIX: was logging.info (root logger), bypassing the module logger.
        logger.info('skip %s' % tmp_fastq)
        return
    raw_count, cleaned_count, error_count, fixed_count = 0, 0, 0, 0
    fastq = Fastq()
    line_count = 0
    # `with` guarantees both handles close even if parsing raises.
    with open(fastq_file) as fp, gzip.open(tmp_fastq, 'wb') as fp_out:
        for line in fp:
            line_count += 1
            line_info = '位于%s文件第%d行' % (fastq_file, line_count)
            line = line.strip()
            if not line:
                continue
            if line.startswith('@') and line.find('runid=') != -1:
                # New record header: flush the previous record first.
                # BUGFIX: was `line_count == 1`, which miscounted an error
                # when blank/stray lines preceded the first header.
                if raw_count:
                    error = fastq.error
                    if error > 0:
                        error_count += 1
                        logger.error('不完整的reads信息 %s' % line_info if error == 1 else 'reads sequence与reads quality长度不一致, %s' % line_info)
                    else:
                        cleaned_count += 1
                        fp_out.write(fastq.out)
                    if fastq.is_qual_fixed:
                        fixed_count += 1
                # Discard any leading junk before the first header, too.
                fastq.clear()
                raw_count += 1
                fastq.read_seqid(line)
            else:
                if line == '+':
                    continue
                fastq.read_seq_qual(line)
        # Flush the trailing record.
        # BUGFIX: guard on raw_count so an empty input file no longer
        # registers a phantom error.
        if raw_count:
            error = fastq.error
            if error > 0:
                error_count += 1
                if error == 1:
                    logger.error('不完整的reads信息, 位于%s最后一行' % fastq_file)
                else:
                    logger.error('reads sequence与reads quality长度不一致, 位于%s最后一行' % fastq_file)
            else:
                cleaned_count += 1
                fp_out.write(fastq.out)
            if fastq.is_qual_fixed:
                fixed_count += 1
    with open(tmp_stat_json, 'w') as fp_json:
        json.dump({'raw': raw_count, 'clean': cleaned_count, 'error': error_count, 'fixed': fixed_count}, fp_json)


def __filter_fastq(fastq_files, out_fastq, thread):
    """Filter many FASTQ files in parallel and merge the results.

    Runs ``__filter_fastq_single`` over *fastq_files* with a *thread*-sized
    process pool, concatenates the per-file gzip outputs into *out_fastq*,
    and returns the aggregated stat dict
    ``{'raw', 'clean', 'error', 'fixed'}``.
    """
    tmp_outdir = os.path.join(os.path.dirname(out_fastq), 'tmp_filter_fastq')
    if not os.path.exists(tmp_outdir):
        os.makedirs(tmp_outdir)
    pool = Pool(thread)
    results = [pool.apply_async(__filter_fastq_single, (fastq_file, tmp_outdir))
               for fastq_file in fastq_files]
    pool.close()
    pool.join()
    for result in results:
        # BUGFIX: apply_async results were discarded, so worker crashes
        # vanished silently. Surface them in the log (best-effort kept).
        try:
            result.get()
        except Exception:
            logger.exception('filter worker failed')
    stat = {'raw': 0, 'clean': 0, 'error': 0, 'fixed': 0}
    tmp_fastq_list = []
    for filename in sorted(os.listdir(tmp_outdir)):
        # BUGFIX: was endswith('fastq.gz'), which silently dropped every
        # temp file built from a *.fq input (named tmp.<x>.fq.gz).
        if filename.startswith('tmp') and filename.endswith('.gz'):
            tmp_fastq_list.append(os.path.join(tmp_outdir, filename))
    # Concatenate in Python rather than `cat` via os.system: gzip members
    # concatenate byte-wise, and this avoids the shell hang on an empty
    # list (bare `cat > out` reads stdin) plus breakage on odd filenames.
    with open(out_fastq, 'wb') as fp_out:
        for tmp_fastq in tmp_fastq_list:
            with open(tmp_fastq, 'rb') as fp_in:
                shutil.copyfileobj(fp_in, fp_out)
    for filename in os.listdir(tmp_outdir):
        if not filename.startswith('tmp') or not filename.endswith('json'):
            continue
        with open(os.path.join(tmp_outdir, filename)) as fp:
            data = json.load(fp)
        for k, v in data.items():
            stat[k] += v
    # os.system('rm -rf %s' % tmp_outdir)  # cleanup intentionally disabled
    return stat


def run_filter_fastq(cell_dir, out_fastq, thread):
    """Walk *cell_dir* for FASTQ files and run the parallel filter.

    Collects every file whose name ends in ``fastq`` or ``fq`` (any
    depth below *cell_dir*) and hands the list to ``__filter_fastq``,
    returning its aggregated stat dict.
    """
    fastq_files = [
        os.path.join(dirpath, name)
        for dirpath, _subdirs, names in os.walk(cell_dir)
        for name in names
        if name.endswith(('fastq', 'fq'))
    ]
    return __filter_fastq(fastq_files, out_fastq, thread)
